ambari-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From gna...@apache.org
Subject [2/2] ambari git commit: AMBARI-18208. Bug Fixing in HueMigration View. (Pradarttana Panda via gauravn7)
Date Sun, 21 Aug 2016 15:15:26 GMT
AMBARI-18208. Bug Fixing in HueMigration View. (Pradarttana Panda via gauravn7)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/7a34fe1d
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/7a34fe1d
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/7a34fe1d

Branch: refs/heads/trunk
Commit: 7a34fe1d80897d5db7acb0ac276fb270b3baa3b9
Parents: 7494549
Author: Gaurav Nagar <gnagar@hortonworks.com>
Authored: Sun Aug 21 20:43:55 2016 +0530
Committer: Gaurav Nagar <gnagar@hortonworks.com>
Committed: Sun Aug 21 20:45:02 2016 +0530

----------------------------------------------------------------------
 .../historyqueryset/MysqlQuerySetAmbariDB.java  |  20 ++-
 .../historyqueryset/OracleQuerySetAmbariDB.java |  19 ++-
 .../hive/historyqueryset/QuerySetAmbariDB.java  |  68 ++++++++--
 .../hive/instancedetail/QuerySetAmbariDB.java   |   4 +-
 .../savedqueryset/MysqlQuerySetAmbariDB.java    |  16 ++-
 .../savedqueryset/OracleQuerySetAmbariDB.java   |  12 +-
 .../hive/savedqueryset/QuerySetAmbariDB.java    |  37 ++++--
 .../pig/instancedetail/QuerySetAmbariDB.java    |   2 +-
 .../pig/jobqueryset/MysqlQuerySetAmbariDB.java  |  10 +-
 .../pig/jobqueryset/OracleQuerySetAmbariDB.java |  10 +-
 .../pig/jobqueryset/QuerySetAmbariDB.java       |  37 ++++--
 .../MysqlQuerySetAmbariDB.java                  |  10 +-
 .../OracleQuerySetAmbariDB.java                 |  18 ++-
 .../savedscriptqueryset/QuerySetAmbariDB.java   |  31 ++++-
 .../HiveHistoryMigrationUtility.java            |  32 +++--
 ...HiveHistoryQueryMigrationImplementation.java |  94 +++++++++++---
 .../HiveSavedQueryMigrationImplementation.java  |  82 ++++++++----
 .../HiveSavedQueryMigrationUtility.java         | 103 ++++++---------
 .../hive/savedquery/HiveSavedQueryStartJob.java |   2 -
 .../pigjob/PigJobMigrationImplementation.java   |  83 ++++++++----
 .../pig/pigjob/PigJobMigrationUtility.java      | 106 +++++++---------
 .../pig/pigscript/PigSavedScriptStartJob.java   |   2 -
 .../PigScriptMigrationImplementation.java       | 125 +++++++++++++++++--
 .../pigscript/PigScriptMigrationUtility.java    |  32 +++--
 .../scripts/models/MigrationModel.java          |  18 +--
 .../app/models/checkprogress.js                 |   2 +-
 .../app/models/returnjobid.js                   |   2 +-
 .../app/models/startmigration.js                |   2 +-
 .../app/routes/home-page/hive-history.js        |  11 +-
 .../app/routes/home-page/hive-saved-query.js    |   8 +-
 .../app/routes/home-page/pig-job.js             |   8 +-
 .../app/routes/home-page/pig-script.js          |   8 +-
 .../app/routes/home-page/revert-change.js       |  10 +-
 .../src/main/resources/view.xml                 |   9 +-
 34 files changed, 735 insertions(+), 298 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/7a34fe1d/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/MysqlQuerySetAmbariDB.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/MysqlQuerySetAmbariDB.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/MysqlQuerySetAmbariDB.java
index c08f009..4c6bffa 100644
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/MysqlQuerySetAmbariDB.java
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/MysqlQuerySetAmbariDB.java
@@ -30,12 +30,15 @@ public class MysqlQuerySetAmbariDB extends QuerySetAmbariDB {
 
   @Override
   protected String getTableIdSqlFromInstanceName() {
-    return "select id from viewentity where class_name LIKE 'org.apache.ambari.view.hive.resources.jobs.viewJobs.JobImpl' and view_instance_name=?;";
+    return "select id from viewentity where class_name LIKE 'org.apache.ambari.view.%hive%.resources.jobs.viewJobs.JobImpl' and view_instance_name=?;";
+  }
+  @Override
+  protected String getSqlInsertHiveHistoryForHive(int id) {
+    return "INSERT INTO DS_JOBIMPL_" + id + " values (?,'','','','','default',?,0,'','','',?,?,?,'','job','','','UNKNOWN',?,'','Worksheet');";
   }
-
   @Override
-  protected String getSqlInsertHiveHistory(int id) {
-    return "INSERT INTO DS_JOBIMPL_" + id + " values (?,'','','','','default',?,0,'','',?,'admin',?,'','job','','','Unknown',?,'','Worksheet');";
+  protected String getSqlInsertHiveHistoryForHiveNext(int id) {
+    return "INSERT INTO DS_JOBIMPL_" + id + " values (?,'','','','','default',?,0,'','','','',?,?,?,'','job','','','UNKNOWN',?,'','Worksheet');";
   }
 
   @Override
@@ -43,4 +46,13 @@ public class MysqlQuerySetAmbariDB extends QuerySetAmbariDB {
     return "delete from  DS_JOBIMPL_" + id + " where ds_id='" + maxcount + "';";
   }
 
+  @Override
+  protected String getSqlUpdateSequenceNo(int id) {
+    return "update ambari_sequences ambari_sequences set sequence_value=? where sequence_name='ds_jobimpl_"+id+"_id_seq';";
+  }
+  @Override
+  protected String getSqlSequenceNoFromAmbariSequence(int id) {
+    return "select sequence_value from ambari_sequences where sequence_name ='ds_jobimpl_"+id+"_id_seq';";
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/7a34fe1d/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/OracleQuerySetAmbariDB.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/OracleQuerySetAmbariDB.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/OracleQuerySetAmbariDB.java
index f3349c6..7a197d3 100644
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/OracleQuerySetAmbariDB.java
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/OracleQuerySetAmbariDB.java
@@ -32,13 +32,28 @@ public class OracleQuerySetAmbariDB extends QuerySetAmbariDB {
   protected String getTableIdSqlFromInstanceName() {
     return "select id from viewentity where class_name LIKE 'org.apache.ambari.view.hive.resources.jobs.viewJobs.JobImpl' and view_instance_name=?";
   }
+
   @Override
-  protected String getSqlInsertHiveHistory(int id) {
-    return "INSERT INTO ds_jobimpl_" + id + " values (?,'','','','','default',?,0,'','',?,'admin',?,'','job','','','Unknown',?,'','Worksheet')";
+  protected String getSqlInsertHiveHistoryForHive(int id) {
+    return "INSERT INTO ds_jobimpl_" + id + " values (?,'','','','','default',?,0,'','','',?,?,?,'','job','','','UNKNOWN',?,'','Worksheet')";
   }
   @Override
+  protected String getSqlInsertHiveHistoryForHiveNext(int id) {
+    return "INSERT INTO ds_jobimpl_" + id + " values (?,'','','','','default',?,0,'','','','',?,?,?,'','job','','','UNKNOWN',?,'','Worksheet')";
+  }
+
+  @Override
   protected String getRevSql(int id,String maxcount){
     return "delete from  ds_jobimpl_" + id + " where ds_id='" + maxcount + "'";
   }
+  @Override
+  protected String getSqlUpdateSequenceNo(int id) {
+    return "update ambari_sequences set sequence_value=? where sequence_name='ds_jobimpl_"+id+"_id_seq'";
+  }
+  @Override
+  protected String getSqlSequenceNoFromAmbariSequence(int id) {
+    return "select sequence_value from ambari_sequences where sequence_name ='ds_jobimpl_"+id+"_id_seq'";
+  }
+
 
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/7a34fe1d/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/QuerySetAmbariDB.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/QuerySetAmbariDB.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/QuerySetAmbariDB.java
index 5be8cc1..dbf4f26 100644
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/QuerySetAmbariDB.java
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/QuerySetAmbariDB.java
@@ -33,6 +33,20 @@ public abstract class QuerySetAmbariDB {
     return prSt;
   }
 
+  public PreparedStatement getSequenceNoFromAmbariSequence(Connection connection,int id) throws SQLException {
+    PreparedStatement prSt = connection.prepareStatement(getSqlSequenceNoFromAmbariSequence(id));
+    return prSt;
+  }
+
+  public PreparedStatement updateSequenceNoInAmbariSequence(Connection connection, int seqNo, int id) throws SQLException {
+
+    PreparedStatement prSt = connection.prepareStatement(getSqlUpdateSequenceNo(id));
+
+    prSt.setInt(1, seqNo);
+
+    return prSt;
+  }
+
   public PreparedStatement getMaxDsIdFromTableId(Connection connection, int id) throws SQLException {
 
     PreparedStatement prSt = connection.prepareStatement(getSqlMaxDSidFromTableId(id));
@@ -40,23 +54,50 @@ public abstract class QuerySetAmbariDB {
     return prSt;
   }
 
-  public PreparedStatement insertToHiveHistory(Connection connection, int id, String maxcount, long epochtime, String dirname) throws SQLException {
+  public PreparedStatement insertToHiveHistoryForHive(Connection connection, int id, String maxcount, long epochtime, String dirname,String username) throws SQLException {
+
+    String Logfile=  dirname + "logs";
+    String queryHqlFile= dirname + "query.hql";
+
+    PreparedStatement prSt = connection.prepareStatement(getSqlInsertHiveHistoryForHive(id));
+
+    prSt.setString(1, maxcount);
+    prSt.setLong(2, epochtime);
+    prSt.setString(3, Logfile);
+    prSt.setString(4, username);
+    prSt.setString(5, queryHqlFile);
+    prSt.setString(6, dirname);
+
+    return prSt;
+  }
+
+  public PreparedStatement insertToHiveHistoryForHiveNext(Connection connection, int id, String maxcount, long epochtime, String dirname,String username) throws SQLException {
 
     String Logfile=  dirname + "logs";
     String queryHqlFile= dirname + "query.hql";
 
-    PreparedStatement prSt = connection.prepareStatement(getSqlInsertHiveHistory(id));
+    PreparedStatement prSt = connection.prepareStatement(getSqlInsertHiveHistoryForHiveNext(id));
 
     prSt.setString(1, maxcount);
     prSt.setLong(2, epochtime);
     prSt.setString(3, Logfile);
-    prSt.setString(4, queryHqlFile);
-    prSt.setString(5, dirname);
+    prSt.setString(4, username);
+    prSt.setString(5, queryHqlFile);
+    prSt.setString(6, dirname);
 
     return prSt;
   }
 
-  public String RevertSql(int id,String maxcount) throws SQLException {
+  public PreparedStatement getHiveVersionInstance(Connection connection,String viewName) throws SQLException {
+    PreparedStatement prSt = connection.prepareStatement(getHiveVersionDetailSql());
+    prSt.setString(1, viewName);
+    return prSt;
+  }
+
+  protected String getHiveVersionDetailSql(){
+    return "select distinct(view_name) as viewname from viewentity where view_instance_name =?;";
+  }
+  public String revertSql(int id, String maxcount) throws SQLException {
     return getRevSql(id,maxcount);
   }
 
@@ -65,15 +106,26 @@ public abstract class QuerySetAmbariDB {
   }
 
   protected String getTableIdSqlFromInstanceName() {
-    return "select id from viewentity where class_name LIKE 'org.apache.ambari.view.hive.resources.jobs.viewJobs.JobImpl' and view_instance_name=?;";
+    return "select id from viewentity where class_name LIKE 'org.apache.ambari.view.%hive%.resources.jobs.viewJobs.JobImpl' and view_instance_name=?;";
+  }
+
+  protected String getSqlInsertHiveHistoryForHive(int id) {
+    return "INSERT INTO ds_jobimpl_" + id + " values (?,'','','','','default',?,0,'','','',?,?,?,'','job','','','UNKNOWN',?,'','Worksheet');";
   }
 
-  protected String getSqlInsertHiveHistory(int id) {
-    return "INSERT INTO ds_jobimpl_" + id + " values (?,'','','','','default',?,0,'','',?,'admin',?,'','job','','','Unknown',?,'','Worksheet');";
+  protected String getSqlInsertHiveHistoryForHiveNext(int id) {
+    return "INSERT INTO ds_jobimpl_" + id + " values (?,'','','','','default',?,0,'','','','',?,?,?,'','job','','','UNKNOWN',?,'','Worksheet');";
   }
 
   protected String getRevSql(int id,String maxcount){
     return "delete from  ds_jobimpl_" + id + " where ds_id='" + maxcount + "';";
   }
+  protected String getSqlUpdateSequenceNo(int id) {
+    return "update ambari_sequences set sequence_value=? where sequence_name='ds_jobimpl_"+id+"_id_seq';";
+  }
+
+  protected String getSqlSequenceNoFromAmbariSequence(int id) {
+    return "select sequence_value from ambari_sequences where sequence_name ='ds_jobimpl_"+id+"_id_seq';";
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/7a34fe1d/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/instancedetail/QuerySetAmbariDB.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/instancedetail/QuerySetAmbariDB.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/instancedetail/QuerySetAmbariDB.java
index da1c411..21bd32e 100644
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/instancedetail/QuerySetAmbariDB.java
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/instancedetail/QuerySetAmbariDB.java
@@ -38,11 +38,11 @@ public abstract class QuerySetAmbariDB {
   }
 
   protected String getHiveInstanceSql(){
-    return "select distinct(view_instance_name) as instancename from viewentity where view_name='HIVE{1.0.0}';";
+    return "select distinct(view_instance_name) as instancename from viewentity where view_name like '%HIVE%';";
   }
 
   protected String getAllInstanceDetailSql(){
-    return "select distinct(view_instance_name) as instancename from viewentity where view_name='HIVE{1.0.0}' or view_name='PIG{1.0.0}';";
+    return "select distinct(view_instance_name) as instancename from viewentity where view_name like '%HIVE%' or view_name like '%PIG%';";
   }
 
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/7a34fe1d/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/savedqueryset/MysqlQuerySetAmbariDB.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/savedqueryset/MysqlQuerySetAmbariDB.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/savedqueryset/MysqlQuerySetAmbariDB.java
index 2b40491..204ace9 100644
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/savedqueryset/MysqlQuerySetAmbariDB.java
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/savedqueryset/MysqlQuerySetAmbariDB.java
@@ -30,7 +30,7 @@ public class MysqlQuerySetAmbariDB extends QuerySetAmbariDB {
 
   @Override
   protected String getTableIdSqlFromInstanceNameSavedQuery() {
-    return "select id from viewentity where class_name LIKE 'org.apache.ambari.view.hive.resources.savedQueries.SavedQuery' and view_instance_name=?;";
+    return "select id from viewentity where class_name LIKE 'org.apache.ambari.view.%hive%.resources.savedQueries.SavedQuery' and view_instance_name=?;";
   }
 
   @Override
@@ -40,17 +40,17 @@ public class MysqlQuerySetAmbariDB extends QuerySetAmbariDB {
 
   @Override
   protected String getTableIdSqlFromInstanceNameHistoryQuery() {
-    return "select id from viewentity where class_name LIKE 'org.apache.ambari.view.hive.resources.jobs.viewJobs.JobImpl' and view_instance_name=?;";
+    return "select id from viewentity where class_name LIKE 'org.apache.ambari.view.%hive%.resources.jobs.viewJobs.JobImpl' and view_instance_name=?;";
   }
 
   @Override
   protected String getSqlInsertHiveHistory(int id) {
-    return "INSERT INTO DS_JOBIMPL_" + id + " values (?,'','','','','default',?,0,'','',?,'admin',?,'','job','','','Unknown',?,'','Worksheet');";
+    return "INSERT INTO DS_JOBIMPL_" + id + " values (?,'','','','','default',?,0,'','',?,?,?,'','job','','','Unknown',?,'','Worksheet');";
   }
 
   @Override
   protected String getSqlInsertSavedQuery(int id) {
-    return "INSERT INTO DS_SAVEDQUERY_" + id + " values (?,?,'" + "admin" + "',?,?,?);";
+    return "INSERT INTO DS_SAVEDQUERY_" + id + " values (?,?,?,?,?,?);";
   }
 
   @Override
@@ -62,4 +62,12 @@ public class MysqlQuerySetAmbariDB extends QuerySetAmbariDB {
   protected String getRevSqlHistoryQuery(int id, String maxcount) {
     return "delete from  DS_JOBIMPL_" + id + " where ds_id='" + maxcount + "';";
   }
+  @Override
+  protected String getSqlSequenceNoFromAmbariSequence(int id) {
+    return "select sequence_value from ambari_sequences where sequence_name ='ds_savedquery_"+id+"_id_seq';";
+  }
+  @Override
+  protected String getSqlUpdateSequenceNo(int id) {
+    return "update ambari_sequences set sequence_value=? where sequence_name='ds_savedquery_"+id+"_id_seq';";
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/7a34fe1d/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/savedqueryset/OracleQuerySetAmbariDB.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/savedqueryset/OracleQuerySetAmbariDB.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/savedqueryset/OracleQuerySetAmbariDB.java
index 11f9170..5efcc1c 100644
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/savedqueryset/OracleQuerySetAmbariDB.java
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/savedqueryset/OracleQuerySetAmbariDB.java
@@ -40,11 +40,11 @@ public class OracleQuerySetAmbariDB extends QuerySetAmbariDB {
 
   @Override
   protected String getSqlInsertHiveHistory(int id) {
-    return "INSERT INTO ds_jobimpl_" + id + " values (?,'','','','','default',?,0,'','',?,'admin',?,'','job','','','Unknown',?,'','Worksheet')";
+    return "INSERT INTO ds_jobimpl_" + id + " values (?,'','','','','default',?,0,'','',?,?,?,'','job','','','Unknown',?,'','Worksheet')";
   }
   @Override
   protected String getSqlInsertSavedQuery(int id) {
-    return "INSERT INTO ds_savedquery_" + id + " values (?,?,'" + "admin" + "',?,?,?)";
+    return "INSERT INTO ds_savedquery_" + id + " values (?,?,?,?,?,?)";
   }
 
   @Override
@@ -55,4 +55,12 @@ public class OracleQuerySetAmbariDB extends QuerySetAmbariDB {
   protected String getRevSqlHistoryQuery(int id, String maxcount) {
     return "delete from  ds_jobimpl_" + id + " where ds_id='" + maxcount + "'";
   }
+  @Override
+  protected String getSqlSequenceNoFromAmbariSequence(int id) {
+    return "select sequence_value from ambari_sequences where sequence_name ='ds_savedquery_"+id+"_id_seq'";
+  }
+  @Override
+  protected String getSqlUpdateSequenceNo(int id) {
+    return "update ambari_sequences set sequence_value=? where sequence_name='ds_savedquery_"+id+"_id_seq'";
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/7a34fe1d/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/savedqueryset/QuerySetAmbariDB.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/savedqueryset/QuerySetAmbariDB.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/savedqueryset/QuerySetAmbariDB.java
index 8ab13b2..3bfe73d 100644
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/savedqueryset/QuerySetAmbariDB.java
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/savedqueryset/QuerySetAmbariDB.java
@@ -34,6 +34,20 @@ public abstract class QuerySetAmbariDB {
     return prSt;
   }
 
+  public PreparedStatement updateSequenceNoInAmbariSequence(Connection connection, int seqNo, int id) throws SQLException {
+
+    PreparedStatement prSt = connection.prepareStatement(getSqlUpdateSequenceNo(id));
+
+    prSt.setInt(1, seqNo);
+
+    return prSt;
+  }
+
+  public PreparedStatement getSequenceNoFromAmbariSequence(Connection connection,int id) throws SQLException {
+    PreparedStatement prSt = connection.prepareStatement(getSqlSequenceNoFromAmbariSequence(id));
+    return prSt;
+  }
+
   public PreparedStatement getTableIdFromInstanceNameHistoryquery(Connection connection, String instance) throws SQLException {
 
     PreparedStatement prSt = connection.prepareStatement(getTableIdSqlFromInstanceNameHistoryQuery());
@@ -70,7 +84,7 @@ public abstract class QuerySetAmbariDB {
     return prSt;
   }
 
-  public PreparedStatement insertToHiveSavedQuery(Connection connection, int id, String maxcount, String database, String dirname, String query, String name) throws SQLException {
+  public PreparedStatement insertToHiveSavedQuery(Connection connection, int id, String maxcount, String database, String dirname, String query, String name,String username) throws SQLException {
 
     String Logfile = dirname + "logs";
     String queryHqlFile = dirname + "query.hql";
@@ -79,9 +93,10 @@ public abstract class QuerySetAmbariDB {
 
     prSt.setString(1, maxcount);
     prSt.setString(2, database);
-    prSt.setString(3, queryHqlFile);
-    prSt.setString(4, query);
-    prSt.setString(5, name);
+    prSt.setString(3, username);
+    prSt.setString(4, queryHqlFile);
+    prSt.setString(5, query);
+    prSt.setString(6, name);
 
     return prSt;
   }
@@ -101,7 +116,7 @@ public abstract class QuerySetAmbariDB {
   }
 
   protected String getTableIdSqlFromInstanceNameSavedQuery() {
-    return "select id from viewentity where class_name LIKE 'org.apache.ambari.view.hive.resources.savedQueries.SavedQuery' and view_instance_name=?;";
+    return "select id from viewentity where class_name LIKE 'org.apache.ambari.view.%hive%.resources.savedQueries.SavedQuery' and view_instance_name=?;";
   }
 
   protected String getSqlMaxDSidFromTableIdHistoryQuery(int id) {
@@ -109,7 +124,7 @@ public abstract class QuerySetAmbariDB {
   }
 
   protected String getTableIdSqlFromInstanceNameHistoryQuery() {
-    return "select id from viewentity where class_name LIKE 'org.apache.ambari.view.hive.resources.jobs.viewJobs.JobImpl' and view_instance_name=?;";
+    return "select id from viewentity where class_name LIKE 'org.apache.ambari.view.%hive%.resources.jobs.viewJobs.JobImpl' and view_instance_name=?;";
   }
 
   protected String getSqlInsertHiveHistory(int id) {
@@ -117,7 +132,7 @@ public abstract class QuerySetAmbariDB {
   }
 
   protected String getSqlInsertSavedQuery(int id) {
-    return "INSERT INTO ds_savedquery_" + id + " values (?,?,'" + "admin" + "',?,?,?);";
+    return "INSERT INTO ds_savedquery_" + id + " values (?,?,?,?,?,?);";
   }
 
   protected String getRevSqlSavedQuery(int id, String maxcount) {
@@ -128,4 +143,12 @@ public abstract class QuerySetAmbariDB {
     return "delete from  ds_jobimpl_" + id + " where ds_id='" + maxcount + "';";
   }
 
+  protected String getSqlSequenceNoFromAmbariSequence(int id) {
+    return "select sequence_value from ambari_sequences where sequence_name ='ds_savedquery_"+id+"_id_seq';";
+  }
+
+  protected String getSqlUpdateSequenceNo(int id) {
+    return "update ambari_sequences set sequence_value=? where sequence_name='ds_savedquery_"+id+"_id_seq';";
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/7a34fe1d/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/instancedetail/QuerySetAmbariDB.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/instancedetail/QuerySetAmbariDB.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/instancedetail/QuerySetAmbariDB.java
index 6d878fe..1241811 100644
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/instancedetail/QuerySetAmbariDB.java
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/instancedetail/QuerySetAmbariDB.java
@@ -33,7 +33,7 @@ public abstract class QuerySetAmbariDB {
   }
 
   protected String getPigInstanceSql(){
-   return  "select distinct(view_instance_name) as instancename from viewentity where view_name='PIG{1.0.0}';";
+   return  "select distinct(view_instance_name) as instancename from viewentity where view_name like '%PIG%';";
   }
 
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/7a34fe1d/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/jobqueryset/MysqlQuerySetAmbariDB.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/jobqueryset/MysqlQuerySetAmbariDB.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/jobqueryset/MysqlQuerySetAmbariDB.java
index fdc2484..b85a552 100644
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/jobqueryset/MysqlQuerySetAmbariDB.java
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/jobqueryset/MysqlQuerySetAmbariDB.java
@@ -34,10 +34,18 @@ public class MysqlQuerySetAmbariDB extends QuerySetAmbariDB {
   }
   @Override
   protected String getSqlinsertToPigJob(int id) {
-    return "INSERT INTO DS_PIGJOB_" + id + " values (?,?,0,'','f','','','admin',0,?,'',?,'','',?,?,'',?);";
+    return "INSERT INTO DS_PIGJOB_" + id + " values (?,?,0,'',0,'','',?,0,?,'',?,'','',?,?,'',?);";
   }
   @Override
   protected String getRevSql(int id, String maxcount) {
     return "delete from  DS_PIGJOB_" + id + " where ds_id='" + maxcount + "';";
   }
+  @Override
+  protected String getSqlSequenceNoFromAmbariSequence(int id) {
+    return "select sequence_value from ambari_sequences where sequence_name ='ds_pigjob_"+id+"_id_seq';";
+  }
+  @Override
+  protected String getSqlUpdateSequenceNo(int id) {
+    return "update ambari_sequences set sequence_value=? where sequence_name='ds_pigjob_"+id+"_id_seq';";
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/7a34fe1d/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/jobqueryset/OracleQuerySetAmbariDB.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/jobqueryset/OracleQuerySetAmbariDB.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/jobqueryset/OracleQuerySetAmbariDB.java
index 2c27409..dea8429 100644
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/jobqueryset/OracleQuerySetAmbariDB.java
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/jobqueryset/OracleQuerySetAmbariDB.java
@@ -32,10 +32,18 @@ public class OracleQuerySetAmbariDB extends QuerySetAmbariDB {
   }
   @Override
   protected String getSqlinsertToPigJob(int id) {
-    return "INSERT INTO ds_pigjob_" + id + " values (?,?,0,'','f','','','admin',0,?,'',?,'','',?,?,'',?)";
+    return "INSERT INTO ds_pigjob_" + id + " values (?,?,0,'','f','','',?,0,?,'',?,'','',?,?,'',?)";
   }
   @Override
   protected String getRevSql(int id, String maxcount) {
     return "delete from  ds_pigjob_" + id + " where ds_id='" + maxcount + "'";
   }
+  @Override
+  protected String getSqlSequenceNoFromAmbariSequence(int id) {
+    return "select sequence_value from ambari_sequences where sequence_name ='ds_pigjob_"+id+"_id_seq'";
+  }
+  @Override
+  protected String getSqlUpdateSequenceNo(int id) {
+    return "update ambari_sequences set sequence_value=? where sequence_name='ds_pigjob_"+id+"_id_seq'";
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/7a34fe1d/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/jobqueryset/QuerySetAmbariDB.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/jobqueryset/QuerySetAmbariDB.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/jobqueryset/QuerySetAmbariDB.java
index ec70e45..7dddd18 100644
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/jobqueryset/QuerySetAmbariDB.java
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/jobqueryset/QuerySetAmbariDB.java
@@ -34,13 +34,27 @@ public abstract class QuerySetAmbariDB {
     return prSt;
   }
 
+  public PreparedStatement getSequenceNoFromAmbariSequence(Connection connection,int id) throws SQLException {
+    PreparedStatement prSt = connection.prepareStatement(getSqlSequenceNoFromAmbariSequence(id));
+    return prSt;
+  }
+
   public PreparedStatement getMaxDsIdFromTableId(Connection connection, int id) throws SQLException {
 
     PreparedStatement prSt = connection.prepareStatement(getSqlMaxDSidFromTableId(id));
     return prSt;
   }
 
-  public PreparedStatement insertToPigJob(String dirname, String maxcountforpigjob, long epochtime1, String title, Connection connection, int id, String status) throws SQLException {
+  public PreparedStatement updateSequenceNoInAmbariSequence(Connection connection, int seqNo, int id) throws SQLException {
+
+    PreparedStatement prSt = connection.prepareStatement(getSqlUpdateSequenceNo(id));
+
+    prSt.setInt(1, seqNo);
+
+    return prSt;
+  }
+
+  public PreparedStatement insertToPigJob(String dirname, String maxcountforpigjob, long epochtime1, String title, Connection connection, int id, String status,String username) throws SQLException {
 
     String pigScriptFile = dirname + "script.pig";
 
@@ -48,11 +62,12 @@ public abstract class QuerySetAmbariDB {
 
     prSt.setString(1, maxcountforpigjob);
     prSt.setLong(2, epochtime1);
-    prSt.setString(3, pigScriptFile);
-    prSt.setString(4, maxcountforpigjob);
-    prSt.setString(5, status);
-    prSt.setString(6, dirname);
-    prSt.setString(7, title);
+    prSt.setString(3, username);
+    prSt.setString(4, pigScriptFile);
+    prSt.setString(5, maxcountforpigjob);
+    prSt.setString(6, status);
+    prSt.setString(7, dirname);
+    prSt.setString(8, title);
 
     return prSt;
   }
@@ -70,11 +85,19 @@ public abstract class QuerySetAmbariDB {
   }
 
   protected String getSqlinsertToPigJob(int id) {
-    return "INSERT INTO ds_pigjob_" + id + " values (?,?,0,'','f','','','admin',0,?,'',?,'','',?,?,'',?);";
+    return "INSERT INTO ds_pigjob_" + id + " values (?,?,0,'','f','','',?,0,?,'',?,'','',?,?,'',?);";
   }
 
   protected String getRevSql(int id, String maxcount) {
     return "delete from  ds_pigjob_" + id + " where ds_id='" + maxcount + "';";
   }
 
+  protected String getSqlSequenceNoFromAmbariSequence(int id) {
+    return "select sequence_value from ambari_sequences where sequence_name ='ds_pigjob_"+id+"_id_seq';";
+  }
+
+  protected String getSqlUpdateSequenceNo(int id) {
+    return "update ambari_sequences set sequence_value=? where sequence_name='ds_pigjob_"+id+"_id_seq';";
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/7a34fe1d/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/savedscriptqueryset/MysqlQuerySetAmbariDB.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/savedscriptqueryset/MysqlQuerySetAmbariDB.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/savedscriptqueryset/MysqlQuerySetAmbariDB.java
index a8c1c4d..878059f 100644
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/savedscriptqueryset/MysqlQuerySetAmbariDB.java
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/savedscriptqueryset/MysqlQuerySetAmbariDB.java
@@ -33,11 +33,19 @@ public class MysqlQuerySetAmbariDB extends QuerySetAmbariDB {
   }
   @Override
   protected String getSqlinsertToPigScript(int id) {
-    return  "INSERT INTO DS_PIGSCRIPT_" + id + " values (?,'1970-01-17 20:28:55.586000 +00:00:00',0,'admin',?,'','',?);";
+    return  "INSERT INTO DS_PIGSCRIPT_" + id + " values (?,'1970-01-17 20:28:55.586000 +00:00:00',0,?,?,'','',?);";
   }
   @Override
   protected String getRevSql(int id,String maxcount){
     return "delete from  DS_PIGSCRIPT_" + id + " where ds_id='" + maxcount + "';";
   }
+  @Override
+  protected String getSqlSequenceNoFromAmbariSequence(int id) {
+    return "select sequence_value from ambari_sequences where sequence_name ='ds_pigscript_"+id+"_id_seq';";
+  }
+  @Override
+  protected String getSqlUpdateSequenceNo(int id) {
+    return "update ambari_sequences set sequence_value=? where sequence_name='ds_pigscript_"+id+"_id_seq';";
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/7a34fe1d/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/savedscriptqueryset/OracleQuerySetAmbariDB.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/savedscriptqueryset/OracleQuerySetAmbariDB.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/savedscriptqueryset/OracleQuerySetAmbariDB.java
index d7dfc0b..101c819 100644
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/savedscriptqueryset/OracleQuerySetAmbariDB.java
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/savedscriptqueryset/OracleQuerySetAmbariDB.java
@@ -21,21 +21,29 @@ public class OracleQuerySetAmbariDB extends QuerySetAmbariDB {
   /*
   * Overriding methods for Oracle specific queries
   * */
-
+  @Override
   protected String getSqlMaxDSidFromTableId(int id) {
     return "select MAX(cast(ds_id as integer)) as max from ds_pigscript_" + id + "";
   }
-
+  @Override
   protected String getTableIdSqlFromInstanceName() {
     return "select id from viewentity where class_name LIKE 'org.apache.ambari.view.pig.resources.scripts.models.PigScript' and view_instance_name=?";
   }
-
+  @Override
   protected String getSqlinsertToPigScript(int id) {
-    return "INSERT INTO ds_pigscript_" + id + " values (?,'1970-01-17 20:28:55.586000 +00:00:00','f','admin',?,'','',?)";
+    return "INSERT INTO ds_pigscript_" + id + " values (?,'1970-01-17 20:28:55.586000 +00:00:00','f',?,?,'','',?)";
   }
-
+  @Override
   protected String getRevSql(int id, String maxcount) {
     return "delete from  ds_pigscript_" + id + " where ds_id='" + maxcount + "'";
   }
+  @Override
+  protected String getSqlSequenceNoFromAmbariSequence(int id) {
+    return "select sequence_value from ambari_sequences where sequence_name ='ds_pigscript_"+id+"_id_seq'";
+  }
+  @Override
+  protected String getSqlUpdateSequenceNo(int id) {
+    return "update ambari_sequences set sequence_value=? where sequence_name='ds_pigscript_"+id+"_id_seq'";
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/7a34fe1d/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/savedscriptqueryset/QuerySetAmbariDB.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/savedscriptqueryset/QuerySetAmbariDB.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/savedscriptqueryset/QuerySetAmbariDB.java
index 4fc61fe..d032264 100644
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/savedscriptqueryset/QuerySetAmbariDB.java
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/savedscriptqueryset/QuerySetAmbariDB.java
@@ -32,18 +32,24 @@ public abstract class QuerySetAmbariDB {
     return prSt;
   }
 
+  public PreparedStatement getSequenceNoFromAmbariSequence(Connection connection,int id) throws SQLException {
+    PreparedStatement prSt = connection.prepareStatement(getSqlSequenceNoFromAmbariSequence(id));
+    return prSt;
+  }
+
   public PreparedStatement getMaxDsIdFromTableId(Connection connection, int id) throws SQLException {
 
     PreparedStatement prSt = connection.prepareStatement(getSqlMaxDSidFromTableId(id));
     return prSt;
   }
 
-  public PreparedStatement insertToPigScript(Connection connection, int id, String maxcount1, String dirname, String title) throws SQLException {
+  public PreparedStatement insertToPigScript(Connection connection, int id, String maxcount1, String dirname, String title,String username) throws SQLException {
 
     PreparedStatement prSt = connection.prepareStatement(getSqlinsertToPigScript(id));
     prSt.setString(1, maxcount1);
-    prSt.setString(2, dirname);
-    prSt.setString(3, title);
+    prSt.setString(2, username);
+    prSt.setString(3, dirname);
+    prSt.setString(4, title);
 
     return prSt;
   }
@@ -52,6 +58,15 @@ public abstract class QuerySetAmbariDB {
     return getRevSql(id, maxcount);
   }
 
+  public PreparedStatement updateSequenceNoInAmbariSequence(Connection connection, int seqNo, int id) throws SQLException {
+
+    PreparedStatement prSt = connection.prepareStatement(getSqlUpdateSequenceNo(id));
+
+    prSt.setInt(1, seqNo);
+
+    return prSt;
+  }
+
   protected String getSqlMaxDSidFromTableId(int id) {
     return "select MAX(cast(ds_id as integer)) as max from ds_pigscript_" + id + ";";
   }
@@ -61,10 +76,18 @@ public abstract class QuerySetAmbariDB {
   }
 
   protected String getSqlinsertToPigScript(int id) {
-    return "INSERT INTO ds_pigscript_" + id + " values (?,'1970-01-17 20:28:55.586000 +00:00:00','f','admin',?,'','',?);";
+    return "INSERT INTO ds_pigscript_" + id + " values (?,'1970-01-17 20:28:55.586000 +00:00:00','f',?,?,'','',?);";
   }
 
   protected String getRevSql(int id, String maxcount) {
     return "delete from  ds_pigscript_" + id + " where ds_id='" + maxcount + "';";
   }
+
+  protected String getSqlSequenceNoFromAmbariSequence(int id) {
+    return "select sequence_value from ambari_sequences where sequence_name ='ds_pigscript_"+id+"_id_seq';";
+  }
+
+  protected String getSqlUpdateSequenceNo(int id) {
+    return "update ambari_sequences set sequence_value=? where sequence_name='ds_pigscript_"+id+"_id_seq';";
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/7a34fe1d/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/historyquery/HiveHistoryMigrationUtility.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/historyquery/HiveHistoryMigrationUtility.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/historyquery/HiveHistoryMigrationUtility.java
index bdcf293..2c205b0 100755
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/historyquery/HiveHistoryMigrationUtility.java
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/historyquery/HiveHistoryMigrationUtility.java
@@ -79,7 +79,7 @@ public class HiveHistoryMigrationUtility {
 
     MigrationModel model = new MigrationModel();
 
-    int maxCountOfAmbariDb, i = 0;
+    int maxCountOfAmbariDb = 0, i = 0, sequence;
     String time = null;
     Long epochTime = null;
     String dirNameforHiveHistroy;
@@ -145,6 +145,12 @@ public class HiveHistoryMigrationUtility {
 
         // for each queries fetched from Hue database//
 
+        //
+        int id = 0;
+
+        id = hiveHistoryQueryImpl.fetchInstanceTablename(connectionAmbaridb, instance, ambaridatabase); // fetching table name according to the given instance name
+        sequence = hiveHistoryQueryImpl.fetchSequenceno(connectionAmbaridb, id, ambaridatabase);
+        //
         for (i = 0; i < dbpojoHiveHistoryQuery.size(); i++) {
 
           float calc = ((float) (i + 1)) / dbpojoHiveHistoryQuery.size() * 100;
@@ -159,9 +165,6 @@ public class HiveHistoryMigrationUtility {
           logger.info("Loop No." + (i + 1));
           logger.info("_____________________");
           logger.info("Hue query that has been fetched" + dbpojoHiveHistoryQuery.get(i).getQuery());
-          int id = 0;
-
-          id = hiveHistoryQueryImpl.fetchInstanceTablename(connectionAmbaridb, instance, ambaridatabase); // feching table name according to the given instance name
 
           logger.info("Table name has been fetched from intance name");
 
@@ -173,35 +176,38 @@ public class HiveHistoryMigrationUtility {
 
           logger.info("Log file created in Temp directory");
 
-          maxCountOfAmbariDb = (hiveHistoryQueryImpl.fetchMaximumIdfromAmbaridb(connectionAmbaridb, id, ambaridatabase) + 1);// fetching the maximum count for ambari db to insert
+          maxCountOfAmbariDb = i + sequence + 1;
 
           time = hiveHistoryQueryImpl.getTime();// getting the system current time.
 
           epochTime = hiveHistoryQueryImpl.getEpochTime();// getting system time as epoch format
 
-          dirNameforHiveHistroy = "/user/admin/migration/jobs/migration-job-" + maxCountOfAmbariDb + "-" + time + "/";// creating the directory name
+          dirNameforHiveHistroy = "/user/"+username+"/hive/jobs/hive-job-" + maxCountOfAmbariDb + "-" + time + "/";// creating the directory name
 
           logger.info("Directory name where .hql will be saved: " + dirNameforHiveHistroy);
 
-          hiveHistoryQueryImpl.insertRowinAmbaridb(dirNameforHiveHistroy, maxCountOfAmbariDb, epochTime, connectionAmbaridb, id, instance, i, ambaridatabase);// inserting in ambari database
+          String versionName = hiveHistoryQueryImpl.getAllHiveVersionInstance(connectionAmbaridb, ambaridatabase, instance);
+
+          hiveHistoryQueryImpl.insertRowinAmbaridb(dirNameforHiveHistroy, maxCountOfAmbariDb, epochTime, connectionAmbaridb, id, instance, i, ambaridatabase, versionName, username);// inserting in ambari database
 
           if (view.getProperties().get("KerberoseEnabled").equals("y")) {
 
             logger.info("kerberose enabled");
-            hiveHistoryQueryImpl.createDirKerberorisedSecured(dirNameforHiveHistroy, view.getProperties().get("namenode_URI_Ambari"));// creating directory in kerborized secured hdfs
+            hiveHistoryQueryImpl.createDirKerberorisedSecured(dirNameforHiveHistroy, view.getProperties().get("namenode_URI_Ambari"),username,view.getProperties().get("PrincipalUserName"));// creating directory in kerberized secured hdfs
             logger.info("Directory created in hdfs");
-            hiveHistoryQueryImpl.putFileinHdfsKerborizedSecured(ConfigurationCheckImplementation.getHomeDir() + "query.hql", dirNameforHiveHistroy, view.getProperties().get("namenode_URI_Ambari"));// copying the .hql file to kerborized hdfs
-            hiveHistoryQueryImpl.putFileinHdfsKerborizedSecured(ConfigurationCheckImplementation.getHomeDir() + "logs", dirNameforHiveHistroy, view.getProperties().get("namenode_URI_Ambari"));// copying the log file to kerborized hdfs
+            hiveHistoryQueryImpl.putFileinHdfsKerborizedSecured(ConfigurationCheckImplementation.getHomeDir() + "query.hql", dirNameforHiveHistroy, view.getProperties().get("namenode_URI_Ambari"),username,view.getProperties().get("PrincipalUserName"));// copying the .hql file to kerberized hdfs
+            hiveHistoryQueryImpl.putFileinHdfsKerborizedSecured(ConfigurationCheckImplementation.getHomeDir() + "logs", dirNameforHiveHistroy, view.getProperties().get("namenode_URI_Ambari"),username,view.getProperties().get("PrincipalUserName"));// copying the log file to kerberized hdfs
           } else {
 
             logger.info("kerberose not enabled");
-            hiveHistoryQueryImpl.createDir(dirNameforHiveHistroy, view.getProperties().get("namenode_URI_Ambari"));// creating directory in hdfs
+            hiveHistoryQueryImpl.createDir(dirNameforHiveHistroy, view.getProperties().get("namenode_URI_Ambari"),username);// creating directory in hdfs
             logger.info("Directory created in hdfs");
-            hiveHistoryQueryImpl.putFileinHdfs(ConfigurationCheckImplementation.getHomeDir() + "query.hql", dirNameforHiveHistroy, view.getProperties().get("namenode_URI_Ambari"));// copying the .hql file to hdfs
-            hiveHistoryQueryImpl.putFileinHdfs(ConfigurationCheckImplementation.getHomeDir() + "logs", dirNameforHiveHistroy, view.getProperties().get("namenode_URI_Ambari"));// copying the log file to hdfs
+            hiveHistoryQueryImpl.putFileinHdfs(ConfigurationCheckImplementation.getHomeDir() + "query.hql", dirNameforHiveHistroy, view.getProperties().get("namenode_URI_Ambari"),username);// copying the .hql file to hdfs
+            hiveHistoryQueryImpl.putFileinHdfs(ConfigurationCheckImplementation.getHomeDir() + "logs", dirNameforHiveHistroy, view.getProperties().get("namenode_URI_Ambari"),username);// copying the log file to hdfs
           }
 
         }
+        hiveHistoryQueryImpl.updateSequenceno(connectionAmbaridb, maxCountOfAmbariDb, id, ambaridatabase);
         connectionAmbaridb.commit();
 
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/7a34fe1d/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/historyquery/HiveHistoryQueryMigrationImplementation.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/historyquery/HiveHistoryQueryMigrationImplementation.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/historyquery/HiveHistoryQueryMigrationImplementation.java
index a0182f6..55c81c0 100644
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/historyquery/HiveHistoryQueryMigrationImplementation.java
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/historyquery/HiveHistoryQueryMigrationImplementation.java
@@ -36,16 +36,15 @@ import org.jdom.input.SAXBuilder;
 import org.jdom.output.Format;
 import org.jdom.output.XMLOutputter;
 
+import java.beans.PropertyVetoException;
 import java.io.*;
 import java.net.URISyntaxException;
 import java.security.PrivilegedExceptionAction;
 import java.sql.*;
 import java.text.ParseException;
 import java.text.SimpleDateFormat;
-import java.util.ArrayList;
-import java.util.Calendar;
+import java.util.*;
 import java.util.Date;
-import java.util.GregorianCalendar;
 
 public class HiveHistoryQueryMigrationImplementation {
 
@@ -131,28 +130,79 @@ public class HiveHistoryQueryMigrationImplementation {
   }
 
 
-  public void insertRowinAmbaridb(String dirname, int maxcount, long epochtime, Connection c, int id, String instance, int i, QuerySetAmbariDB ambaridatabase) throws SQLException, IOException {
+  public void insertRowinAmbaridb(String dirname, int maxcount, long epochtime, Connection c, int id, String instance, int i, QuerySetAmbariDB ambaridatabase, String versionName, String username) throws SQLException, IOException {
 
     String maxcount1 = Integer.toString(maxcount);
     String epochtime1 = Long.toString(epochtime);
     PreparedStatement prSt = null;
     String revsql = null;
+    if (versionName.contains("1.5")) {
+      prSt = ambaridatabase.insertToHiveHistoryForHiveNext(c, id, maxcount1, epochtime, dirname, username);
+    }
+    if (versionName.contains("1.0")) {
+      prSt = ambaridatabase.insertToHiveHistoryForHive(c, id, maxcount1, epochtime, dirname, username);
+    }
+    logger.info("The actual insert statement is " + prSt);
+    prSt.executeUpdate();
+    revsql = ambaridatabase.revertSql(id, maxcount1);
+    logger.info("adding revert sql hive history");
+    wrtitetoalternatesqlfile(dirname, revsql, instance, i);
+  }
 
-    prSt = ambaridatabase.insertToHiveHistory(c, id, maxcount1, epochtime, dirname);
+  public void updateSequenceno(Connection c, int seqNo, int id, QuerySetAmbariDB ambaridatabase) throws SQLException, IOException {
 
+    PreparedStatement prSt;
+    prSt = ambaridatabase.updateSequenceNoInAmbariSequence(c, seqNo, id);
     logger.info("The actual insert statement is " + prSt);
-
     prSt.executeUpdate();
+    logger.info("adding revert sql hive history");
+  }
 
-    revsql = ambaridatabase.RevertSql(id, maxcount1);
+  public String getAllHiveVersionInstance(Connection c, QuerySetAmbariDB ambaridatabase, String viewName) throws PropertyVetoException, SQLException, IOException {
 
-    logger.info("adding revert sql hive history");
 
-    wrtitetoalternatesqlfile(dirname, revsql, instance, i);
+    PreparedStatement prSt;
+    ResultSet rs1 = null;
+    String instanceVersion = null;
+    int i = 0;
+    prSt = ambaridatabase.getHiveVersionInstance(c, viewName);
+    rs1 = prSt.executeQuery();
+
+    while (rs1.next()) {
 
+      instanceVersion = rs1.getString(1);
+
+    }
+    rs1.close();
+    prSt.close();
+    return instanceVersion;
 
   }
 
+  public int fetchSequenceno(Connection c, int id, QuerySetAmbariDB ambaridatabase) throws SQLException {
+
+    String ds_id = new String();
+    Statement stmt = null;
+    PreparedStatement prSt = null;
+    int sequencevalue = 0;
+
+
+    ResultSet rs = null;
+
+
+    prSt = ambaridatabase.getSequenceNoFromAmbariSequence(c, id);
+
+    logger.info("sql statement to fetch is from ambari instance:= =  " + prSt);
+
+    rs = prSt.executeQuery();
+
+    while (rs.next()) {
+      sequencevalue = rs.getInt("sequence_value");
+    }
+    return sequencevalue;
+  }
+
+
   public int fetchInstanceTablename(Connection c, String instance, QuerySetAmbariDB ambaridatabase) throws SQLException {
 
     String ds_id = new String();
@@ -177,7 +227,7 @@ public class HiveHistoryQueryMigrationImplementation {
   }
 
   public long getEpochTime() throws ParseException {
-    long seconds = System.currentTimeMillis() / 1000l;
+    long seconds = System.currentTimeMillis();
     return seconds;
 
   }
@@ -364,7 +414,7 @@ public class HiveHistoryQueryMigrationImplementation {
 
   }
 
-  public void createDir(final String dir, final String namenodeuri) throws IOException,
+  public void createDir(final String dir, final String namenodeuri, final String username) throws IOException,
     URISyntaxException {
 
     try {
@@ -387,8 +437,10 @@ public class HiveHistoryQueryMigrationImplementation {
         public Boolean run() throws Exception {
 
           FileSystem fs = FileSystem.get(conf);
+
           Path src = new Path(dir);
           Boolean b = fs.mkdirs(src);
+          fs.setOwner(src,username,"hadoop");
           return b;
         }
       });
@@ -397,7 +449,7 @@ public class HiveHistoryQueryMigrationImplementation {
     }
   }
 
-  public void createDirKerberorisedSecured(final String dir, final String namenodeuri) throws IOException,
+  public void createDirKerberorisedSecured(final String dir, final String namenodeuri, final String username,final String principalName) throws IOException,
     URISyntaxException {
 
     try {
@@ -410,16 +462,18 @@ public class HiveHistoryQueryMigrationImplementation {
         org.apache.hadoop.fs.LocalFileSystem.class.getName()
       );
       conf.set("fs.defaultFS", namenodeuri);
-      conf.set("hadoop.job.ugi", "hdfs");
       conf.set("hadoop.security.authentication", "Kerberos");
       UserGroupInformation.setConfiguration(conf);
-      UserGroupInformation ugi = UserGroupInformation.createRemoteUser("hdfs");
+      UserGroupInformation proxyUser ;
+      proxyUser = UserGroupInformation.createRemoteUser(principalName);
+      UserGroupInformation ugi = UserGroupInformation.createProxyUser("hdfs", proxyUser);
       ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
 
         public Boolean run() throws Exception {
           FileSystem fs = FileSystem.get(conf);
           Path src = new Path(dir);
           Boolean b = fs.mkdirs(src);
+          fs.setOwner(src,username,"hadoop");
           return b;
         }
       });
@@ -429,7 +483,7 @@ public class HiveHistoryQueryMigrationImplementation {
   }
 
 
-  public void putFileinHdfs(final String source, final String dest, final String namenodeuri)
+  public void putFileinHdfs(final String source, final String dest, final String namenodeuri,final String username)
     throws IOException {
 
     try {
@@ -476,6 +530,7 @@ public class HiveHistoryQueryMigrationImplementation {
           }
           in.close();
           out.close();
+          fileSystem.setOwner(path,username,"hadoop");
           fileSystem.close();
           return null;
         }
@@ -486,7 +541,7 @@ public class HiveHistoryQueryMigrationImplementation {
 
   }
 
-  public void putFileinHdfsKerborizedSecured(final String source, final String dest, final String namenodeuri)
+  public void putFileinHdfsKerborizedSecured(final String source, final String dest, final String namenodeuri,final String username,final String principalName)
     throws IOException {
 
     try {
@@ -500,11 +555,11 @@ public class HiveHistoryQueryMigrationImplementation {
         org.apache.hadoop.fs.LocalFileSystem.class.getName()
       );
       conf.set("fs.defaultFS", namenodeuri);
-      conf.set("hadoop.job.ugi", "hdfs");
       conf.set("hadoop.security.authentication", "Kerberos");
       UserGroupInformation.setConfiguration(conf);
-      UserGroupInformation ugi = UserGroupInformation.createRemoteUser("hdfs");
-
+      UserGroupInformation proxyUser ;
+      proxyUser = UserGroupInformation.createRemoteUser(principalName);
+      UserGroupInformation ugi = UserGroupInformation.createProxyUser("hdfs", proxyUser);
       ugi.doAs(new PrivilegedExceptionAction<Void>() {
 
         public Void run() throws Exception {
@@ -537,6 +592,7 @@ public class HiveHistoryQueryMigrationImplementation {
           }
           in.close();
           out.close();
+          fileSystem.setOwner(path,username,"hadoop");
           fileSystem.close();
           return null;
         }

http://git-wip-us.apache.org/repos/asf/ambari/blob/7a34fe1d/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryMigrationImplementation.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryMigrationImplementation.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryMigrationImplementation.java
index 7bd48b2..4d9a6c3 100644
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryMigrationImplementation.java
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryMigrationImplementation.java
@@ -150,6 +150,39 @@ public class HiveSavedQueryMigrationImplementation {
     return num;
   }
 
+
+  public int fetchSequenceno(Connection c, int id, QuerySetAmbariDB ambaridatabase) throws SQLException {
+
+    String ds_id = new String();
+    Statement stmt = null;
+    PreparedStatement prSt = null;
+    int sequencevalue=0;
+
+
+    ResultSet rs = null;
+
+
+    prSt = ambaridatabase.getSequenceNoFromAmbariSequence(c, id);
+
+    logger.info("sql statement to fetch is from ambari instance:= =  " + prSt);
+
+    rs = prSt.executeQuery();
+
+    while (rs.next()) {
+      sequencevalue = rs.getInt("sequence_value");
+    }
+    return sequencevalue;
+  }
+
+  public void updateSequenceno(Connection c, int seqNo, int id, QuerySetAmbariDB ambaridatabase) throws SQLException, IOException {
+
+    PreparedStatement prSt;
+    prSt = ambaridatabase.updateSequenceNoInAmbariSequence(c, seqNo, id);
+    logger.info("The actual insert statement is " + prSt);
+    prSt.executeUpdate();
+    logger.info("adding revert sql hive history");
+  }
+
   public int fetchInstancetablenameForSavedqueryHive(Connection c, String instance, QuerySetAmbariDB ambaridatabase) throws SQLException {
 
     String ds_id = new String();
@@ -245,14 +278,14 @@ public class HiveSavedQueryMigrationImplementation {
     wrtitetoalternatesqlfile(dirname, revsql, instance, i);
   }
 
-  public void insertRowinSavedQuery(int maxcount, String database, String dirname, String query, String name, Connection c, int id, String instance, int i, QuerySetAmbariDB ambaridatabase) throws SQLException, IOException {
+  public void insertRowinSavedQuery(int maxcount, String database, String dirname, String query, String name, Connection c, int id, String instance, int i, QuerySetAmbariDB ambaridatabase,String username) throws SQLException, IOException {
 
     String maxcount1 = Integer.toString(maxcount);
     String revsql = null;
 
     PreparedStatement prSt = null;
 
-    prSt = ambaridatabase.insertToHiveSavedQuery(c, id, maxcount1, database, dirname, query, name);
+    prSt = ambaridatabase.insertToHiveSavedQuery(c, id, maxcount1, database, dirname, query, name,username);
 
     System.out.println("the actual query is " + prSt);
 
@@ -480,7 +513,7 @@ public class HiveSavedQueryMigrationImplementation {
 
   }
 
-  public void createDirHive(final String dir, final String namenodeuri)
+  public void createDirHive(final String dir, final String namenodeuri,final String username)
     throws IOException, URISyntaxException {
 
     try {
@@ -506,6 +539,7 @@ public class HiveSavedQueryMigrationImplementation {
           FileSystem fs = FileSystem.get(conf);
           Path src = new Path(dir);
           fs.mkdirs(src);
+          fs.setOwner(src,username,"hadoop");
           return null;
         }
       });
@@ -514,9 +548,8 @@ public class HiveSavedQueryMigrationImplementation {
     }
   }
 
-  public void createDirHiveSecured(final String dir, final String namenodeuri)
+  public void createDirHiveSecured(final String dir, final String namenodeuri,final String username,final String principalName)
     throws IOException, URISyntaxException {
-
     try {
       final Configuration conf = new Configuration();
 
@@ -527,29 +560,28 @@ public class HiveSavedQueryMigrationImplementation {
         org.apache.hadoop.fs.LocalFileSystem.class.getName()
       );
       conf.set("fs.defaultFS", namenodeuri);
-      conf.set("hadoop.job.ugi", "hdfs");
       conf.set("hadoop.security.authentication", "Kerberos");
-
       UserGroupInformation.setConfiguration(conf);
-      UserGroupInformation ugi = UserGroupInformation.createRemoteUser("hdfs");
-
-      ugi.doAs(new PrivilegedExceptionAction<Void>() {
-
-        public Void run() throws Exception {
+      UserGroupInformation proxyUser ;
+      proxyUser = UserGroupInformation.createRemoteUser(principalName);
+      UserGroupInformation ugi = UserGroupInformation.createProxyUser("hdfs", proxyUser);
+      ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
 
+        public Boolean run() throws Exception {
           FileSystem fs = FileSystem.get(conf);
           Path src = new Path(dir);
-          fs.mkdirs(src);
-          return null;
+          Boolean b = fs.mkdirs(src);
+          fs.setOwner(src,username,"hadoop");
+          return b;
         }
       });
     } catch (Exception e) {
-      logger.error("Webhdfs: ", e);
+      logger.error("Exception in Webhdfs", e);
     }
   }
 
   public void putFileinHdfs(final String source, final String dest,
-                            final String namenodeuri) throws IOException {
+                            final String namenodeuri,final String username) throws IOException {
 
     try {
       final Configuration conf = new Configuration();
@@ -596,6 +628,7 @@ public class HiveSavedQueryMigrationImplementation {
           }
           in.close();
           out.close();
+          fileSystem.setOwner(path,username,"hadoop");
           fileSystem.close();
           return null;
         }
@@ -607,10 +640,10 @@ public class HiveSavedQueryMigrationImplementation {
   }
 
 
-  public void putFileinHdfsSecured(final String source, final String dest,
-                                   final String namenodeuri) throws IOException {
+  public void putFileinHdfsSecured(final String source, final String dest,final String namenodeuri,final String username,final String principalName) throws IOException {
 
     try {
+
       final Configuration conf = new Configuration();
 
       conf.set("fs.hdfs.impl",
@@ -620,16 +653,15 @@ public class HiveSavedQueryMigrationImplementation {
         org.apache.hadoop.fs.LocalFileSystem.class.getName()
       );
       conf.set("fs.defaultFS", namenodeuri);
-      conf.set("hadoop.job.ugi", "hdfs");
       conf.set("hadoop.security.authentication", "Kerberos");
-
       UserGroupInformation.setConfiguration(conf);
-      UserGroupInformation ugi = UserGroupInformation.createRemoteUser("hdfs");
+      UserGroupInformation proxyUser ;
+      proxyUser = UserGroupInformation.createRemoteUser(principalName);
+      UserGroupInformation ugi = UserGroupInformation.createProxyUser("hdfs", proxyUser);
       ugi.doAs(new PrivilegedExceptionAction<Void>() {
 
         public Void run() throws Exception {
 
-
           FileSystem fileSystem = FileSystem.get(conf);
 
           String filename = source.substring(
@@ -645,7 +677,7 @@ public class HiveSavedQueryMigrationImplementation {
           if (fileSystem.exists(path)) {
 
           }
-          // Path pathsource = new Path(source);
+
           FSDataOutputStream out = fileSystem.create(path);
 
           InputStream in = new BufferedInputStream(
@@ -658,14 +690,14 @@ public class HiveSavedQueryMigrationImplementation {
           }
           in.close();
           out.close();
+          fileSystem.setOwner(path,username,"hadoop");
           fileSystem.close();
-
-
           return null;
         }
       });
     } catch (Exception e) {
       logger.error("Webhdfs exception", e);
+
     }
 
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/7a34fe1d/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryMigrationUtility.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryMigrationUtility.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryMigrationUtility.java
index 9f9e053..566b369 100644
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryMigrationUtility.java
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryMigrationUtility.java
@@ -47,7 +47,6 @@ import java.util.ArrayList;
 public class HiveSavedQueryMigrationUtility {
 
 
-
   protected MigrationResourceManager resourceManager = null;
 
   public synchronized PersonalCRUDResourceManager<MigrationResponse> getResourceManager(ViewContext view) {
@@ -78,43 +77,31 @@ public class HiveSavedQueryMigrationUtility {
 
     HiveSavedQueryMigrationImplementation hivesavedqueryimpl = new HiveSavedQueryMigrationImplementation();/* creating Implementation object  */
 
-    QuerySet huedatabase=null;
+    QuerySet huedatabase = null;
 
-    if(view.getProperties().get("huedrivername").contains("mysql"))
-    {
-      huedatabase=new MysqlQuerySet();
-    }
-    else if(view.getProperties().get("huedrivername").contains("postgresql"))
-    {
-      huedatabase=new PostgressQuerySet();
-    }
-    else if(view.getProperties().get("huedrivername").contains("sqlite"))
-    {
-     huedatabase=new SqliteQuerySet();
-    }
-    else if (view.getProperties().get("huedrivername").contains("oracle"))
-    {
-      huedatabase=new OracleQuerySet();
+    if (view.getProperties().get("huedrivername").contains("mysql")) {
+      huedatabase = new MysqlQuerySet();
+    } else if (view.getProperties().get("huedrivername").contains("postgresql")) {
+      huedatabase = new PostgressQuerySet();
+    } else if (view.getProperties().get("huedrivername").contains("sqlite")) {
+      huedatabase = new SqliteQuerySet();
+    } else if (view.getProperties().get("huedrivername").contains("oracle")) {
+      huedatabase = new OracleQuerySet();
     }
 
 
-    QuerySetAmbariDB ambaridatabase=null;
+    QuerySetAmbariDB ambaridatabase = null;
 
 
-    if(view.getProperties().get("ambaridrivername").contains("mysql"))
-    {
-      ambaridatabase=new MysqlQuerySetAmbariDB();
-    }
-    else if(view.getProperties().get("ambaridrivername").contains("postgresql"))
-    {
-      ambaridatabase=new PostgressQuerySetAmbariDB();
-    }
-    else if (view.getProperties().get("ambaridrivername").contains("oracle"))
-    {
-      ambaridatabase= new OracleQuerySetAmbariDB();
+    if (view.getProperties().get("ambaridrivername").contains("mysql")) {
+      ambaridatabase = new MysqlQuerySetAmbariDB();
+    } else if (view.getProperties().get("ambaridrivername").contains("postgresql")) {
+      ambaridatabase = new PostgressQuerySetAmbariDB();
+    } else if (view.getProperties().get("ambaridrivername").contains("oracle")) {
+      ambaridatabase = new OracleQuerySetAmbariDB();
     }
 
-    int maxcountForHivehistroryAmbaridb, maxCountforSavequeryAmbaridb;
+    int maxcountForHivehistroryAmbaridb, maxCountforSavequeryAmbaridb = 0;
     String time = null;
     Long epochtime = null;
     String dirNameforHiveSavedquery;
@@ -124,12 +111,11 @@ public class HiveSavedQueryMigrationUtility {
 
       connectionHuedb = DataSourceHueDatabase.getInstance(view.getProperties().get("huedrivername"), view.getProperties().get("huejdbcurl"), view.getProperties().get("huedbusername"), view.getProperties().get("huedbpassword")).getConnection(); /* fetching connection to hue DB */
 
-      dbpojoHiveSavedQuery = hivesavedqueryimpl.fetchFromHuedb(username, startDate, endDate, connectionHuedb,huedatabase); /* fetching data from hue db and storing it in to a model */
+      dbpojoHiveSavedQuery = hivesavedqueryimpl.fetchFromHuedb(username, startDate, endDate, connectionHuedb, huedatabase); /* fetching data from hue db and storing it in to a model */
 
 
-      for(int j=0;j<dbpojoHiveSavedQuery.size();j++)
-      {
-        logger.info("the query fetched from hue"+dbpojoHiveSavedQuery.get(j).getQuery());
+      for (int j = 0; j < dbpojoHiveSavedQuery.size(); j++) {
+        logger.info("the query fetched from hue" + dbpojoHiveSavedQuery.get(j).getQuery());
 
       }
 
@@ -151,6 +137,9 @@ public class HiveSavedQueryMigrationUtility {
         connectionAmbaridb = DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"), view.getProperties().get("ambarijdbcurl"), view.getProperties().get("ambaridbusername"), view.getProperties().get("ambaridbpassword")).getConnection();/* connecting to ambari DB */
         connectionAmbaridb.setAutoCommit(false);
 
+        int tableIdSavedQuery = hivesavedqueryimpl.fetchInstancetablenameForSavedqueryHive(connectionAmbaridb, instance, ambaridatabase); /* fetching the instance table name for migration saved query  from the given instance name */
+        int sequence = hivesavedqueryimpl.fetchSequenceno(connectionAmbaridb, tableIdSavedQuery, ambaridatabase);
+
         for (i = 0; i < dbpojoHiveSavedQuery.size(); i++) {
 
           logger.info("_____________________");
@@ -162,19 +151,12 @@ public class HiveSavedQueryMigrationUtility {
 
           migrationresult.setIsNoQuerySelected("no");
           migrationresult.setProgressPercentage(progressPercentage);
-          migrationresult.setNumberOfQueryTransfered(i+1);
+          migrationresult.setNumberOfQueryTransfered(i + 1);
           migrationresult.setTotalNoQuery(dbpojoHiveSavedQuery.size());
           getResourceManager(view).update(migrationresult, jobid);
 
-
-
-
           logger.info("query fetched from hue:-  " + dbpojoHiveSavedQuery.get(i).getQuery());
 
-          int tableIdSavedQuery = hivesavedqueryimpl.fetchInstancetablenameForSavedqueryHive(connectionAmbaridb, instance,ambaridatabase); /* fetching the instance table name for migration saved query  from the given instance name */
-
-          int tableIdHistoryHive = hivesavedqueryimpl.fetchInstanceTablenameHiveHistory(connectionAmbaridb, instance,ambaridatabase); /* fetching the instance table name for migration history query from the given instance name */
-
           logger.info("Table name are fetched from instance name.");
 
           hivesavedqueryimpl.writetoFilequeryHql(dbpojoHiveSavedQuery.get(i).getQuery(), ConfigurationCheckImplementation.getHomeDir()); /* writing migration query to a local file*/
@@ -183,43 +165,37 @@ public class HiveSavedQueryMigrationUtility {
 
           logger.info(".hql and logs file are saved in temporary directory");
 
-          maxcountForHivehistroryAmbaridb = (hivesavedqueryimpl.fetchMaxdsidFromHiveHistory( connectionAmbaridb, tableIdHistoryHive,ambaridatabase) + 1);/* fetching the maximum ds_id from migration history table*/
-
-          maxCountforSavequeryAmbaridb = (hivesavedqueryimpl.fetchMaxidforSavedQueryHive(connectionAmbaridb, tableIdSavedQuery,ambaridatabase) + 1);/* fetching the maximum ds_id from migration saved query table*/
+          maxCountforSavequeryAmbaridb = i + sequence + 1;
 
           time = hivesavedqueryimpl.getTime();/* getting system time */
 
-          epochtime = hivesavedqueryimpl.getEpochTime();/* getting epoch time */
-
-          dirNameforHiveSavedquery = "/user/admin/migration/jobs/migration-job-" + maxcountForHivehistroryAmbaridb + "-"
+          dirNameforHiveSavedquery = "/user/"+username+"/hive/scripts/hive-query-" + maxCountforSavequeryAmbaridb + "-"
             + time + "/"; // creating hdfs directory name
 
           logger.info("Directory will be creted in HDFS" + dirNameforHiveSavedquery);
 
-          hivesavedqueryimpl.insertRowHiveHistory(dirNameforHiveSavedquery,maxcountForHivehistroryAmbaridb,epochtime,connectionAmbaridb,tableIdHistoryHive,instance,i,ambaridatabase);// inserting to migration history table
-
           logger.info("Row inserted in hive History table.");
 
           if (view.getProperties().get("KerberoseEnabled").equals("y")) {
 
             logger.info("Kerberose Enabled");
-            hivesavedqueryimpl.createDirHiveSecured(dirNameforHiveSavedquery, view.getProperties().get("namenode_URI_Ambari"));// creating directory in hdfs in kerborized cluster
-            hivesavedqueryimpl.putFileinHdfsSecured(ConfigurationCheckImplementation.getHomeDir() + "query.hql", dirNameforHiveSavedquery, view.getProperties().get("namenode_URI_Ambari"));// putting .hql file in hdfs in kerberoroized cluster
-            hivesavedqueryimpl.putFileinHdfsSecured(ConfigurationCheckImplementation.getHomeDir() + "logs", dirNameforHiveSavedquery, view.getProperties().get("namenode_URI_Ambari"));// putting logs file in hdfs in kerberoroized cluster
+            hivesavedqueryimpl.createDirHiveSecured(dirNameforHiveSavedquery, view.getProperties().get("namenode_URI_Ambari"),username,view.getProperties().get("PrincipalUserName"));// creating directory in hdfs in kerborized cluster
+            hivesavedqueryimpl.putFileinHdfsSecured(ConfigurationCheckImplementation.getHomeDir() + "query.hql", dirNameforHiveSavedquery, view.getProperties().get("namenode_URI_Ambari"),username,view.getProperties().get("PrincipalUserName"));// putting .hql file in hdfs in kerberoroized cluster
+            hivesavedqueryimpl.putFileinHdfsSecured(ConfigurationCheckImplementation.getHomeDir() + "logs", dirNameforHiveSavedquery, view.getProperties().get("namenode_URI_Ambari"),username,view.getProperties().get("PrincipalUserName"));// putting logs file in hdfs in kerberoroized cluster
 
           } else {
-
             logger.info("Kerberose Not Enabled");
-            hivesavedqueryimpl.createDirHive(dirNameforHiveSavedquery, view.getProperties().get("namenode_URI_Ambari"));// creating directory in hdfs
-            hivesavedqueryimpl.putFileinHdfs(ConfigurationCheckImplementation.getHomeDir() + "query.hql", dirNameforHiveSavedquery, view.getProperties().get("namenode_URI_Ambari"));// putting .hql file in hdfs directory
-            hivesavedqueryimpl.putFileinHdfs(ConfigurationCheckImplementation.getHomeDir() + "logs", dirNameforHiveSavedquery, view.getProperties().get("namenode_URI_Ambari"));// putting logs file in hdfs
+            hivesavedqueryimpl.createDirHive(dirNameforHiveSavedquery, view.getProperties().get("namenode_URI_Ambari"),username);// creating directory in hdfs
+            hivesavedqueryimpl.putFileinHdfs(ConfigurationCheckImplementation.getHomeDir() + "query.hql", dirNameforHiveSavedquery, view.getProperties().get("namenode_URI_Ambari"),username);// putting .hql file in hdfs directory
+            hivesavedqueryimpl.putFileinHdfs(ConfigurationCheckImplementation.getHomeDir() + "logs", dirNameforHiveSavedquery, view.getProperties().get("namenode_URI_Ambari"),username);// putting logs file in hdfs
           }
 
           //inserting into hived saved query table
           //6.
-          hivesavedqueryimpl.insertRowinSavedQuery(maxCountforSavequeryAmbaridb, dbpojoHiveSavedQuery.get(i).getDatabase(), dirNameforHiveSavedquery, dbpojoHiveSavedQuery.get(i).getQuery(), dbpojoHiveSavedQuery.get(i).getOwner(), connectionAmbaridb, tableIdSavedQuery, instance, i,ambaridatabase);
+          hivesavedqueryimpl.insertRowinSavedQuery(maxCountforSavequeryAmbaridb, dbpojoHiveSavedQuery.get(i).getDatabase(), dirNameforHiveSavedquery, dbpojoHiveSavedQuery.get(i).getQuery(), dbpojoHiveSavedQuery.get(i).getOwner(), connectionAmbaridb, tableIdSavedQuery, instance, i, ambaridatabase, username);
 
         }
+        hivesavedqueryimpl.updateSequenceno(connectionAmbaridb, maxCountforSavequeryAmbaridb, tableIdSavedQuery, ambaridatabase);
         connectionAmbaridb.commit();
 
       }
@@ -236,13 +212,13 @@ public class HiveSavedQueryMigrationUtility {
 
       }
     } catch (ClassNotFoundException e1) {
-      logger.error("Class not found : " , e1);
+      logger.error("Class not found : ", e1);
     } catch (ParseException e) {
-      logger.error("ParseException: " , e);
+      logger.error("ParseException: ", e);
     } catch (URISyntaxException e) {
-      logger.error("URISyntaxException: " , e);
+      logger.error("URISyntaxException: ", e);
     } catch (PropertyVetoException e) {
-      logger.error("PropertyVetoException:" , e);
+      logger.error("PropertyVetoException:", e);
     } finally {
       if (null != connectionAmbaridb)
         try {
@@ -259,14 +235,13 @@ public class HiveSavedQueryMigrationUtility {
     long stopTime = System.currentTimeMillis();
     long elapsedTime = stopTime - startTime;
 
-    MigrationModel model=new MigrationModel();
+    MigrationModel model = new MigrationModel();
 
     migrationresult.setJobtype("hivesavedquerymigration");
     migrationresult.setTotalTimeTaken(String.valueOf(elapsedTime));
     getResourceManager(view).update(migrationresult, jobid);
 
 
-
     logger.info("-------------------------------");
     logger.info("hive saved query Migration end");
     logger.info("--------------------------------");

http://git-wip-us.apache.org/repos/asf/ambari/blob/7a34fe1d/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryStartJob.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryStartJob.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryStartJob.java
index 0445132..de81f0b 100644
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryStartJob.java
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryStartJob.java
@@ -44,8 +44,6 @@ public class HiveSavedQueryStartJob extends Thread{
     this.view=view;
   }
 
-
-
   @Override
   public void run() {
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/7a34fe1d/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigjob/PigJobMigrationImplementation.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigjob/PigJobMigrationImplementation.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigjob/PigJobMigrationImplementation.java
index 64e7069..8f843d0 100644
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigjob/PigJobMigrationImplementation.java
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigjob/PigJobMigrationImplementation.java
@@ -32,8 +32,8 @@ import java.util.GregorianCalendar;
 import java.io.*;
 import java.net.URISyntaxException;
 ;
+import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.pig.jobqueryset.QuerySetAmbariDB;
 import org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.pig.jobqueryset.QuerySet;
-import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.pig.jobqueryset.*;
 import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.PigModel;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
@@ -141,6 +141,29 @@ public class PigJobMigrationImplementation {
 
   }
 
+  public int fetchSequenceno(Connection c, int id, QuerySetAmbariDB ambaridatabase) throws SQLException {
+
+    String ds_id = new String();
+    Statement stmt = null;
+    PreparedStatement prSt = null;
+    int sequencevalue=0;
+
+
+    ResultSet rs = null;
+
+
+    prSt = ambaridatabase.getSequenceNoFromAmbariSequence(c, id);
+
+    logger.info("sql statement to fetch is from ambari instance:= =  " + prSt);
+
+    rs = prSt.executeQuery();
+
+    while (rs.next()) {
+      sequencevalue = rs.getInt("sequence_value");
+    }
+    return sequencevalue;
+  }
+
   public int fetchInstanceTablename(Connection c, String instance, QuerySetAmbariDB ambaridatabase) throws SQLException {
 
 
@@ -165,7 +188,7 @@ public class PigJobMigrationImplementation {
     return id;
   }
 
-  public void insertRowPigJob(String dirname, int maxcountforpigjob, String time, String time2, long epochtime, String title, Connection c, int id, String status, String instance, int i, QuerySetAmbariDB ambaridatabase) throws SQLException, IOException {
+  public void insertRowPigJob(String dirname, int maxcountforpigjob, String time, String time2, long epochtime, String title, Connection c, int id, String status, String instance, int i, QuerySetAmbariDB ambaridatabase,String username) throws SQLException, IOException {
 
     String epochtime1 = Long.toString(epochtime);
     String maxcountforpigjob1 = Integer.toString(maxcountforpigjob);
@@ -174,7 +197,7 @@ public class PigJobMigrationImplementation {
 
     PreparedStatement prSt = null;
 
-    prSt = ambaridatabase.insertToPigJob(dirname, maxcountforpigjob1, epochtime, title, c, id, status);
+    prSt = ambaridatabase.insertToPigJob(dirname, maxcountforpigjob1, epochtime, title, c, id, status,username);
 
     prSt.executeUpdate();
 
@@ -341,7 +364,23 @@ public class PigJobMigrationImplementation {
 
   }
 
-  public void createDirPigJob(final String dir, final String namenodeuri) throws IOException,
+  public void updateSequenceno(Connection c, int seqNo, int id, QuerySetAmbariDB ambaridatabase) throws SQLException, IOException {
+
+    PreparedStatement prSt;
+
+    prSt = ambaridatabase.updateSequenceNoInAmbariSequence(c, seqNo, id);
+
+    logger.info("The actual insert statement is " + prSt);
+
+    prSt.executeUpdate();
+
+
+    logger.info("adding revert sql hive history");
+
+
+
+  }
+  public void createDirPigJob(final String dir, final String namenodeuri,final String username) throws IOException,
     URISyntaxException {
 
     try {
@@ -365,6 +404,7 @@ public class PigJobMigrationImplementation {
           FileSystem fs = FileSystem.get(conf);
           Path src = new Path(dir);
           fs.mkdirs(src);
+          fs.setOwner(src,username,"hadoop");
           return null;
         }
       });
@@ -374,9 +414,8 @@ public class PigJobMigrationImplementation {
   }
 
   /**/
-  public void createDirPigJobSecured(final String dir, final String namenodeuri) throws IOException,
+  public void createDirPigJobSecured(final String dir, final String namenodeuri,final String username,final String principalName) throws IOException,
     URISyntaxException {
-
     try {
       final Configuration conf = new Configuration();
 
@@ -387,30 +426,28 @@ public class PigJobMigrationImplementation {
         org.apache.hadoop.fs.LocalFileSystem.class.getName()
       );
       conf.set("fs.defaultFS", namenodeuri);
-      conf.set("hadoop.job.ugi", "hdfs");
       conf.set("hadoop.security.authentication", "Kerberos");
-
       UserGroupInformation.setConfiguration(conf);
-      UserGroupInformation ugi = UserGroupInformation.createRemoteUser("hdfs");
-
-      ugi.doAs(new PrivilegedExceptionAction<Void>() {
-
-        public Void run() throws Exception {
-
+      UserGroupInformation proxyUser ;
+      proxyUser = UserGroupInformation.createRemoteUser(principalName);
+      UserGroupInformation ugi = UserGroupInformation.createProxyUser("hdfs", proxyUser);
+      ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
 
+        public Boolean run() throws Exception {
           FileSystem fs = FileSystem.get(conf);
           Path src = new Path(dir);
-          fs.mkdirs(src);
-          return null;
+          Boolean b = fs.mkdirs(src);
+          fs.setOwner(src,username,"hadoop");
+          return b;
         }
       });
     } catch (Exception e) {
-      logger.error("Webhdfs exception: ", e);
+      logger.error("Exception in Webhdfs", e);
     }
   }
 
   /**/
-  public void copyFileBetweenHdfs(final String source, final String dest, final String nameNodeuriAmbari, final String nameNodeuriHue)
+  public void copyFileBetweenHdfs(final String source, final String dest, final String nameNodeuriAmbari, final String nameNodeuriHue,final String username)
     throws IOException {
 
     try {
@@ -457,6 +494,7 @@ public class PigJobMigrationImplementation {
           }
           in1.close();
           out.close();
+          fileSystemAmbari.setOwner(path,username,"hadoop");
           fileSystemAmbari.close();
           return null;
         }
@@ -468,7 +506,7 @@ public class PigJobMigrationImplementation {
   }
 
   /**/
-  public void copyFileBetweenHdfsSecured(final String source, final String dest, final String nameNodeuriAmbari, final String nameNodeuriHue)
+  public void copyFileBetweenHdfsSecured(final String source, final String dest, final String nameNodeuriAmbari, final String nameNodeuriHue,final String username,final String pricipalName)
     throws IOException {
 
     try {
@@ -484,8 +522,10 @@ public class PigJobMigrationImplementation {
       confAmbari.set("hadoop.security.authentication", "Kerberos");
       confHue.set("hadoop.security.authentication", "Kerberos");
 
-      UserGroupInformation ugi = UserGroupInformation
-        .createRemoteUser("hdfs");
+      UserGroupInformation.setConfiguration(confAmbari);
+      UserGroupInformation proxyUser ;
+      proxyUser = UserGroupInformation.createRemoteUser(pricipalName);
+      UserGroupInformation ugi = UserGroupInformation.createProxyUser("hdfs", proxyUser);
 
       ugi.doAs(new PrivilegedExceptionAction<Void>() {
 
@@ -519,6 +559,7 @@ public class PigJobMigrationImplementation {
           }
           in1.close();
           out.close();
+          fileSystemAmbari.setOwner(path,username,"hadoop");
           fileSystemAmbari.close();
           return null;
         }


Mime
View raw message