ambari-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From gna...@apache.org
Subject [1/2] ambari git commit: AMBARI-18208. Bug Fixing in HueMigration View. (Pradarttana Panda via gauravn7)
Date Sun, 21 Aug 2016 15:15:25 GMT
Repository: ambari
Updated Branches:
  refs/heads/trunk 74945492b -> 7a34fe1d8


http://git-wip-us.apache.org/repos/asf/ambari/blob/7a34fe1d/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigjob/PigJobMigrationUtility.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigjob/PigJobMigrationUtility.java
b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigjob/PigJobMigrationUtility.java
index 5d99b49..c9cfc9d 100644
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigjob/PigJobMigrationUtility.java
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigjob/PigJobMigrationUtility.java
@@ -41,7 +41,7 @@ import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceHueDatab
 import org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.pig.jobqueryset.*;
 import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.pig.jobqueryset.*;
 
-public class PigJobMigrationUtility  {
+public class PigJobMigrationUtility {
 
   protected MigrationResourceManager resourceManager = null;
 
@@ -70,40 +70,28 @@ public class PigJobMigrationUtility  {
 
     PigJobMigrationImplementation pigjobimpl = new PigJobMigrationImplementation();// creating
the implementation object
 
-    QuerySet huedatabase=null;
+    QuerySet huedatabase = null;
 
-    if(view.getProperties().get("huedrivername").contains("mysql"))
-    {
-      huedatabase=new MysqlQuerySet();
-    }
-    else if(view.getProperties().get("huedrivername").contains("postgresql"))
-    {
-      huedatabase=new PostgressQuerySet();
-    }
-    else if(view.getProperties().get("huedrivername").contains("sqlite"))
-    {
-      huedatabase=new SqliteQuerySet();
-    }
-    else if (view.getProperties().get("huedrivername").contains("oracle"))
-    {
-      huedatabase=new OracleQuerySet();
+    if (view.getProperties().get("huedrivername").contains("mysql")) {
+      huedatabase = new MysqlQuerySet();
+    } else if (view.getProperties().get("huedrivername").contains("postgresql")) {
+      huedatabase = new PostgressQuerySet();
+    } else if (view.getProperties().get("huedrivername").contains("sqlite")) {
+      huedatabase = new SqliteQuerySet();
+    } else if (view.getProperties().get("huedrivername").contains("oracle")) {
+      huedatabase = new OracleQuerySet();
     }
 
-    QuerySetAmbariDB ambaridatabase=null;
+    QuerySetAmbariDB ambaridatabase = null;
 
-    if(view.getProperties().get("ambaridrivername").contains("mysql"))
-    {
-      ambaridatabase=new MysqlQuerySetAmbariDB();
-    }
-    else if(view.getProperties().get("ambaridrivername").contains("postgresql"))
-    {
-      ambaridatabase=new PostgressQuerySetAmbariDB();
+    if (view.getProperties().get("ambaridrivername").contains("mysql")) {
+      ambaridatabase = new MysqlQuerySetAmbariDB();
+    } else if (view.getProperties().get("ambaridrivername").contains("postgresql")) {
+      ambaridatabase = new PostgressQuerySetAmbariDB();
+    } else if (view.getProperties().get("ambaridrivername").contains("oracle")) {
+      ambaridatabase = new OracleQuerySetAmbariDB();
     }
-    else if (view.getProperties().get("ambaridrivername").contains("oracle"))
-    {
-      ambaridatabase= new OracleQuerySetAmbariDB();
-    }
-    int maxCountforPigScript = 0,i=0;
+    int maxCountforPigScript = 0, i = 0;
 
     String time = null, timeIndorder = null;
     Long epochtime = null;
@@ -114,11 +102,10 @@ public class PigJobMigrationUtility  {
 
       connectionHuedb = DataSourceHueDatabase.getInstance(view.getProperties().get("huedrivername"),
view.getProperties().get("huejdbcurl"), view.getProperties().get("huedbusername"), view.getProperties().get("huedbpassword")).getConnection();//connecting
to hue database
 
-      pigJobDbPojo = pigjobimpl.fetchFromHueDB(username, startDate, endDate, connectionHuedb,huedatabase);//
fetching the PigJobs details from hue
+      pigJobDbPojo = pigjobimpl.fetchFromHueDB(username, startDate, endDate, connectionHuedb,
huedatabase);// fetching the PigJobs details from hue
 
-      for(int j=0;j<pigJobDbPojo.size();j++)
-      {
-        logger.info("the query fetched from hue="+pigJobDbPojo.get(i).getScript());
+      for (int j = 0; j < pigJobDbPojo.size(); j++) {
+        logger.info("the query fetched from hue=" + pigJobDbPojo.get(i).getScript());
 
       }
 
@@ -137,54 +124,52 @@ public class PigJobMigrationUtility  {
         connectionAmbaridb = DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"),
view.getProperties().get("ambarijdbcurl"), view.getProperties().get("ambaridbusername"), view.getProperties().get("ambaridbpassword")).getConnection();
         connectionAmbaridb.setAutoCommit(false);
 
+        int fetchPigTablenameInstance = pigjobimpl.fetchInstanceTablename(connectionAmbaridb,
instance, ambaridatabase);
+        int sequence = pigjobimpl.fetchSequenceno(connectionAmbaridb, fetchPigTablenameInstance,
ambaridatabase);
+
         for (i = 0; i < pigJobDbPojo.size(); i++) {
 
           float calc = ((float) (i + 1)) / pigJobDbPojo.size() * 100;
           int progressPercentage = Math.round(calc);
           migrationresult.setIsNoQuerySelected("no");
           migrationresult.setProgressPercentage(progressPercentage);
-          migrationresult.setNumberOfQueryTransfered(i+1);
+          migrationresult.setNumberOfQueryTransfered(i + 1);
           migrationresult.setTotalNoQuery(pigJobDbPojo.size());
           getResourceManager(view).update(migrationresult, jobid);
 
-
-
-
           logger.info("Loop No." + (i + 1));
           logger.info("________________");
           logger.info("the title of script " + pigJobDbPojo.get(i).getTitle());
 
-          int fetchPigTablenameInstance = pigjobimpl.fetchInstanceTablename(connectionAmbaridb,
instance,ambaridatabase);
-
-          maxCountforPigScript = (pigjobimpl.fetchMaxIdforPigJob(connectionAmbaridb, fetchPigTablenameInstance,ambaridatabase)
+ 1);
+          maxCountforPigScript = i + sequence + 1;
 
           time = pigjobimpl.getTime();
           timeIndorder = pigjobimpl.getTimeInorder();
           epochtime = pigjobimpl.getEpochTime();
 
-          pigJobDirName = "/user/admin/pig/jobs/" + pigJobDbPojo.get(i).getTitle() + "_"
+ time + "/";
+          pigJobDirName = "/user/"+username+"/pig/jobs/" + pigJobDbPojo.get(i).getTitle()
+ "_" + time + "/";
 
-          pigjobimpl.insertRowPigJob(pigJobDirName, maxCountforPigScript, time, timeIndorder,
epochtime, pigJobDbPojo.get(i).getTitle(), connectionAmbaridb, fetchPigTablenameInstance,
pigJobDbPojo.get(i).getStatus(), instance, i,ambaridatabase);
+          pigjobimpl.insertRowPigJob(pigJobDirName, maxCountforPigScript, time, timeIndorder,
epochtime, pigJobDbPojo.get(i).getTitle(), connectionAmbaridb, fetchPigTablenameInstance,
pigJobDbPojo.get(i).getStatus(), instance, i, ambaridatabase, username);
 
           if (view.getProperties().get("KerberoseEnabled").equals("y")) {
-
-            pigjobimpl.createDirPigJobSecured(pigJobDirName, view.getProperties().get("namenode_URI_Ambari"));
-            pigjobimpl.copyFileBetweenHdfsSecured(pigJobDbPojo.get(i).getDir() + "/script.pig",
pigJobDirName, view.getProperties().get("namenode_URI_Ambari"), view.getProperties().get("namenode_URI_Hue"));
-            pigjobimpl.copyFileBetweenHdfsSecured(pigJobDbPojo.get(i).getDir() + "/stderr",
pigJobDirName, view.getProperties().get("namenode_URI_Ambari"), view.getProperties().get("namenode_URI_Hue"));
-            pigjobimpl.copyFileBetweenHdfsSecured(pigJobDbPojo.get(i).getDir() + "/stdout",
pigJobDirName, view.getProperties().get("namenode_URI_Ambari"), view.getProperties().get("namenode_URI_Hue"));
+            pigjobimpl.createDirPigJobSecured(pigJobDirName, view.getProperties().get("namenode_URI_Ambari"),username,view.getProperties().get("PrincipalUserName"));
+            pigjobimpl.copyFileBetweenHdfsSecured(pigJobDbPojo.get(i).getDir() + "/script.pig",
pigJobDirName, view.getProperties().get("namenode_URI_Ambari"), view.getProperties().get("namenode_URI_Hue"),username,view.getProperties().get("PrincipalUserName"));
+            pigjobimpl.copyFileBetweenHdfsSecured(pigJobDbPojo.get(i).getDir() + "/stderr",
pigJobDirName, view.getProperties().get("namenode_URI_Ambari"), view.getProperties().get("namenode_URI_Hue"),username,view.getProperties().get("PrincipalUserName"));
+            pigjobimpl.copyFileBetweenHdfsSecured(pigJobDbPojo.get(i).getDir() + "/stdout",
pigJobDirName, view.getProperties().get("namenode_URI_Ambari"), view.getProperties().get("namenode_URI_Hue"),username,view.getProperties().get("PrincipalUserName"));
 
           } else {
 
-            pigjobimpl.createDirPigJob(pigJobDirName, view.getProperties().get("namenode_URI_Ambari"));
-            pigjobimpl.copyFileBetweenHdfs(pigJobDbPojo.get(i).getDir() + "/script.pig",
pigJobDirName, view.getProperties().get("namenode_URI_Ambari"), view.getProperties().get("namenode_URI_Hue"));
-            pigjobimpl.copyFileBetweenHdfs(pigJobDbPojo.get(i).getDir() + "/stderr", pigJobDirName,
view.getProperties().get("namenode_URI_Ambari"), view.getProperties().get("namenode_URI_Hue"));
-            pigjobimpl.copyFileBetweenHdfs(pigJobDbPojo.get(i).getDir() + "/stdout", pigJobDirName,
view.getProperties().get("namenode_URI_Ambari"), view.getProperties().get("namenode_URI_Hue"));
+            pigjobimpl.createDirPigJob(pigJobDirName, view.getProperties().get("namenode_URI_Ambari"),username);
+            pigjobimpl.copyFileBetweenHdfs(pigJobDbPojo.get(i).getDir() + "/script.pig",
pigJobDirName, view.getProperties().get("namenode_URI_Ambari"), view.getProperties().get("namenode_URI_Hue"),username);
+            pigjobimpl.copyFileBetweenHdfs(pigJobDbPojo.get(i).getDir() + "/stderr", pigJobDirName,
view.getProperties().get("namenode_URI_Ambari"), view.getProperties().get("namenode_URI_Hue"),username);
+            pigjobimpl.copyFileBetweenHdfs(pigJobDbPojo.get(i).getDir() + "/stdout", pigJobDirName,
view.getProperties().get("namenode_URI_Ambari"), view.getProperties().get("namenode_URI_Hue"),username);
 
           }
 
           logger.info(pigJobDbPojo.get(i).getTitle() + "has been migrated to Ambari");
 
         }
+        pigjobimpl.updateSequenceno(connectionAmbaridb, maxCountforPigScript, fetchPigTablenameInstance,
ambaridatabase);
         connectionAmbaridb.commit();
       }
 
@@ -194,16 +179,16 @@ public class PigJobMigrationUtility  {
         connectionAmbaridb.rollback();
         logger.info("roll back done");
       } catch (SQLException e1) {
-        logger.error("roll back  exception:",e1);
+        logger.error("roll back  exception:", e1);
       }
     } catch (ClassNotFoundException e2) {
-      logger.error("class not found exception:",e2);
+      logger.error("class not found exception:", e2);
     } catch (ParseException e) {
-      logger.error("ParseException: " ,e);
+      logger.error("ParseException: ", e);
     } catch (URISyntaxException e) {
-      logger.error("URISyntaxException" ,e);
+      logger.error("URISyntaxException", e);
     } catch (PropertyVetoException e) {
-      logger.error("PropertyVetoException" ,e);
+      logger.error("PropertyVetoException", e);
     } finally {
       if (null != connectionAmbaridb)
         try {
@@ -217,13 +202,6 @@ public class PigJobMigrationUtility  {
     logger.info("pig Job Migration End");
     logger.info("------------------------------");
 
-    //session.setAttribute(ProgressBarStatus.TASK_PROGRESS_VARIABLE, 0);
-
-//    CheckProgresStatus.setProgressPercentage(0);
-//    CheckProgresStatus.setNoOfQueryCompleted(0);
-//    CheckProgresStatus.setTotalNoOfQuery(0);
-//    CheckProgresStatus.setNoOfQueryLeft(0);
-
     long stopTime = System.currentTimeMillis();
     long elapsedTime = stopTime - startTime;
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/7a34fe1d/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigscript/PigSavedScriptStartJob.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigscript/PigSavedScriptStartJob.java
b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigscript/PigSavedScriptStartJob.java
index c5f073c..eaf5d38 100644
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigscript/PigSavedScriptStartJob.java
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigscript/PigSavedScriptStartJob.java
@@ -43,8 +43,6 @@ public class PigSavedScriptStartJob extends Thread{
     this.view=view;
   }
 
-
-
   @Override
   public void run() {
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/7a34fe1d/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigscript/PigScriptMigrationImplementation.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigscript/PigScriptMigrationImplementation.java
b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigscript/PigScriptMigrationImplementation.java
index c8aa1c0..b259cfb 100644
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigscript/PigScriptMigrationImplementation.java
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigscript/PigScriptMigrationImplementation.java
@@ -39,6 +39,7 @@ import org.jdom.output.Format;
 import org.jdom.output.XMLOutputter;
 
 import java.io.*;
+import java.net.URISyntaxException;
 import java.security.PrivilegedExceptionAction;
 import java.sql.*;
 import java.text.ParseException;
@@ -127,6 +128,29 @@ public class PigScriptMigrationImplementation {
 
   }
 
+  public int fetchSequenceno(Connection c, int id, QuerySetAmbariDB ambaridatabase) throws
SQLException {
+
+    String ds_id = new String();
+    Statement stmt = null;
+    PreparedStatement prSt = null;
+    int sequencevalue=0;
+
+
+    ResultSet rs = null;
+
+
+    prSt = ambaridatabase.getSequenceNoFromAmbariSequence(c, id);
+
+    logger.info("sql statement to fetch is from ambari instance:= =  " + prSt);
+
+    rs = prSt.executeQuery();
+
+    while (rs.next()) {
+      sequencevalue = rs.getInt("sequence_value");
+    }
+    return sequencevalue;
+  }
+
   public int fetchInstanceTablenamePigScript(Connection c, String instance, QuerySetAmbariDB
ambaridatabase) throws SQLException {
 
     String ds_id = new String();
@@ -175,7 +199,16 @@ public class PigScriptMigrationImplementation {
     return num;
   }
 
-  public void insertRowForPigScript(String dirname, int maxcountforpigjob, int maxcount,
String time, String time2, long epochtime, String title, Connection c, int id, String instance,
int i, QuerySetAmbariDB ambaridatabase) throws SQLException, IOException {
+  public void updateSequenceno(Connection c, int seqNo, int id, QuerySetAmbariDB ambaridatabase)
throws SQLException, IOException {
+
+    PreparedStatement prSt;
+    prSt = ambaridatabase.updateSequenceNoInAmbariSequence(c, seqNo, id);
+    logger.info("The actual insert statement is " + prSt);
+    prSt.executeUpdate();
+    logger.info("adding revert sql hive history");
+  }
+
+  public void insertRowForPigScript(String dirname, int maxcountforpigjob, int maxcount,
String time, String time2, long epochtime, String title, Connection c, int id, String instance,
int i, QuerySetAmbariDB ambaridatabase,String username) throws SQLException, IOException {
 
     String maxcount1 = Integer.toString(maxcount);
     String epochtime1 = Long.toString(epochtime);
@@ -183,7 +216,7 @@ public class PigScriptMigrationImplementation {
 
     PreparedStatement prSt = null;
 
-    prSt = ambaridatabase.insertToPigScript(c, id, maxcount1, dirname, title);
+    prSt = ambaridatabase.insertToPigScript(c, id, maxcount1, dirname, title,username);
 
     prSt.executeUpdate();
 
@@ -383,7 +416,77 @@ public class PigScriptMigrationImplementation {
 
   }
 
-  public void putFileinHdfs(final String source, final String dest, final String namenodeuri)
+  public void createDirPigScriptSecured(final String dir, final String namenodeuri,final
String username,final String principalName)
+    throws IOException, URISyntaxException {
+    try {
+      final Configuration conf = new Configuration();
+
+      conf.set("fs.hdfs.impl",
+        org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()
+      );
+      conf.set("fs.file.impl",
+        org.apache.hadoop.fs.LocalFileSystem.class.getName()
+      );
+      conf.set("fs.defaultFS", namenodeuri);
+      conf.set("hadoop.security.authentication", "Kerberos");
+      UserGroupInformation.setConfiguration(conf);
+      UserGroupInformation proxyUser ;
+      proxyUser = UserGroupInformation.createRemoteUser(principalName);
+      UserGroupInformation ugi = UserGroupInformation.createProxyUser("hdfs", proxyUser);
+      ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
+
+        public Boolean run() throws Exception {
+          FileSystem fs = FileSystem.get(conf);
+          Path src = new Path(dir);
+          Boolean b = fs.mkdirs(src);
+          fs.setOwner(src,username,"hadoop");
+          return b;
+        }
+      });
+    } catch (Exception e) {
+      logger.error("Exception in Webhdfs", e);
+    }
+  }
+
+  public void createDirPigScript(final String dir, final String namenodeuri,final String
username)
+    throws IOException, URISyntaxException {
+
+    try {
+      final Configuration conf = new Configuration();
+
+      conf.set("fs.hdfs.impl",
+        org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()
+      );
+      conf.set("fs.file.impl",
+        org.apache.hadoop.fs.LocalFileSystem.class.getName()
+      );
+      conf.set("fs.defaultFS", namenodeuri);
+      conf.set("hadoop.job.ugi", "hdfs");
+      conf.set("hadoop.security.authentication", "Kerberos");
+
+      UserGroupInformation.setConfiguration(conf);
+      UserGroupInformation ugi = UserGroupInformation.createRemoteUser("hdfs");
+
+      ugi.doAs(new PrivilegedExceptionAction<Void>() {
+
+        public Void run() throws Exception {
+
+          FileSystem fs = FileSystem.get(conf);
+          Path src = new Path(dir);
+          fs.mkdirs(src);
+          fs.setOwner(src,username,"hadoop");
+          return null;
+        }
+      });
+    } catch (Exception e) {
+      logger.error("Webhdfs: ", e);
+    }
+  }
+
+
+
+
+  public void putFileinHdfs(final String source, final String dest, final String namenodeuri,final
String username)
     throws IOException {
 
     try {
@@ -430,6 +533,7 @@ public class PigScriptMigrationImplementation {
           }
           in.close();
           out.close();
+          fileSystem.setOwner(path,username,"hadoop");
           fileSystem.close();
           return null;
         }
@@ -440,10 +544,11 @@ public class PigScriptMigrationImplementation {
 
   }
 
-  public void putFileinHdfsSecured(final String source, final String dest, final String namenodeuri)
+  public void putFileinHdfsSecured(final String source, final String dest, final String namenodeuri,final
String username,final String principalName)
     throws IOException {
 
     try {
+
       final Configuration conf = new Configuration();
 
       conf.set("fs.hdfs.impl",
@@ -453,12 +558,11 @@ public class PigScriptMigrationImplementation {
         org.apache.hadoop.fs.LocalFileSystem.class.getName()
       );
       conf.set("fs.defaultFS", namenodeuri);
-      conf.set("hadoop.job.ugi", "hdfs");
       conf.set("hadoop.security.authentication", "Kerberos");
-
       UserGroupInformation.setConfiguration(conf);
-      UserGroupInformation ugi = UserGroupInformation.createRemoteUser("hdfs");
-
+      UserGroupInformation proxyUser ;
+      proxyUser = UserGroupInformation.createRemoteUser(principalName);
+      UserGroupInformation ugi = UserGroupInformation.createProxyUser("hdfs", proxyUser);
       ugi.doAs(new PrivilegedExceptionAction<Void>() {
 
         public Void run() throws Exception {
@@ -478,7 +582,7 @@ public class PigScriptMigrationImplementation {
           if (fileSystem.exists(path)) {
 
           }
-          //	Path pathsource = new Path(source);
+
           FSDataOutputStream out = fileSystem.create(path);
 
           InputStream in = new BufferedInputStream(
@@ -491,12 +595,13 @@ public class PigScriptMigrationImplementation {
           }
           in.close();
           out.close();
+          fileSystem.setOwner(path,username,"hadoop");
           fileSystem.close();
           return null;
         }
       });
     } catch (Exception e) {
-      logger.error("Webhdfs Exception: ", e);
+      logger.error("Webhdfs exception", e);
 
     }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/7a34fe1d/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigscript/PigScriptMigrationUtility.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigscript/PigScriptMigrationUtility.java
b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigscript/PigScriptMigrationUtility.java
index 44e27c1..00e1c6c 100644
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigscript/PigScriptMigrationUtility.java
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigscript/PigScriptMigrationUtility.java
@@ -34,6 +34,7 @@ import org.apache.log4j.Logger;
 
 import java.beans.PropertyVetoException;
 import java.io.IOException;
+import java.net.URISyntaxException;
 import java.sql.Connection;
 import java.sql.SQLException;
 import java.text.ParseException;
@@ -98,11 +99,11 @@ public class PigScriptMigrationUtility {
       ambaridatabase = new OracleQuerySetAmbariDB();
     }
 
-    int maxcountforsavequery = 0, maxcountforpigsavedscript;
+    int maxcountforsavequery = 0, maxcountforpigsavedscript = 0;
     String time = null, timetobeInorder = null;
     Long epochTime = null;
     String dirNameForPigScript, completeDirandFilePath, pigscriptFilename = "";
-    int pigInstanceTableName;
+    int pigInstanceTableName, sequence;
 
     ArrayList<PigModel> dbpojoPigSavedscript = new ArrayList<PigModel>();
 
@@ -132,10 +133,15 @@ public class PigScriptMigrationUtility {
 
         connectionAmbaridb = DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"),
view.getProperties().get("ambarijdbcurl"), view.getProperties().get("ambaridbusername"), view.getProperties().get("ambaridbpassword")).getConnection();//
connecting to ambari db
         connectionAmbaridb.setAutoCommit(false);
+
         logger.info("loop will continue for " + dbpojoPigSavedscript.size() + "times");
 
         //for each pig script found in Hue Database
 
+        pigInstanceTableName = pigsavedscriptmigration.fetchInstanceTablenamePigScript(connectionAmbaridb,
instance, ambaridatabase);// finding the table name in ambari from the given instance
+
+        sequence = pigsavedscriptmigration.fetchSequenceno(connectionAmbaridb, pigInstanceTableName,
ambaridatabase);
+
         for (i = 0; i < dbpojoPigSavedscript.size(); i++) {
 
 
@@ -151,17 +157,15 @@ public class PigScriptMigrationUtility {
           logger.info("________________");
           logger.info("the title of script:  " + dbpojoPigSavedscript.get(i).getTitle());
 
-          pigInstanceTableName = pigsavedscriptmigration.fetchInstanceTablenamePigScript(connectionAmbaridb,
instance, ambaridatabase);// finding the table name in ambari from the given instance
-
-          maxcountforpigsavedscript = (pigsavedscriptmigration.fetchmaxIdforPigSavedScript(connectionAmbaridb,
pigInstanceTableName, ambaridatabase) + 1);// maximum count of the primary key of pig Script
table
-
           time = pigsavedscriptmigration.getTime();
 
           timetobeInorder = pigsavedscriptmigration.getTimeInorder();
 
           epochTime = pigsavedscriptmigration.getEpochTime();
 
-          dirNameForPigScript = "/user/admin/pig/scripts/";
+          maxcountforpigsavedscript = i + sequence + 1;
+
+          dirNameForPigScript = "/user/"+username+"/pig/scripts/";
 
           pigscriptFilename = dbpojoPigSavedscript.get(i).getTitle() + "-" + time + ".pig";
 
@@ -169,12 +173,16 @@ public class PigScriptMigrationUtility {
 
           pigsavedscriptmigration.writetPigScripttoLocalFile(dbpojoPigSavedscript.get(i).getScript(),
dbpojoPigSavedscript.get(i).getTitle(), dbpojoPigSavedscript.get(i).getDt(), ConfigurationCheckImplementation.getHomeDir(),
pigscriptFilename);
 
-          pigsavedscriptmigration.insertRowForPigScript(completeDirandFilePath, maxcountforsavequery,
maxcountforpigsavedscript, time, timetobeInorder, epochTime, dbpojoPigSavedscript.get(i).getTitle(),
connectionAmbaridb, pigInstanceTableName, instance, i, ambaridatabase);
+          pigsavedscriptmigration.insertRowForPigScript(completeDirandFilePath, maxcountforsavequery,
maxcountforpigsavedscript, time, timetobeInorder, epochTime, dbpojoPigSavedscript.get(i).getTitle(),
connectionAmbaridb, pigInstanceTableName, instance, i, ambaridatabase, username);
 
           if (view.getProperties().get("KerberoseEnabled").equals("y")) {
-            pigsavedscriptmigration.putFileinHdfsSecured(ConfigurationCheckImplementation.getHomeDir()
+ pigscriptFilename, dirNameForPigScript, view.getProperties().get("namenode_URI_Ambari"));
+
+            pigsavedscriptmigration.createDirPigScriptSecured(dirNameForPigScript, view.getProperties().get("namenode_URI_Ambari"),username,view.getProperties().get("PrincipalUserName"));
+            pigsavedscriptmigration.putFileinHdfsSecured(ConfigurationCheckImplementation.getHomeDir()
+ pigscriptFilename, dirNameForPigScript, view.getProperties().get("namenode_URI_Ambari"),username,view.getProperties().get("PrincipalUserName"));
           } else {
-            pigsavedscriptmigration.putFileinHdfs(ConfigurationCheckImplementation.getHomeDir()
+ pigscriptFilename, dirNameForPigScript, view.getProperties().get("namenode_URI_Ambari"));
+
+            pigsavedscriptmigration.createDirPigScript(dirNameForPigScript, view.getProperties().get("namenode_URI_Ambari"),username);
+            pigsavedscriptmigration.putFileinHdfs(ConfigurationCheckImplementation.getHomeDir()
+ pigscriptFilename, dirNameForPigScript, view.getProperties().get("namenode_URI_Ambari"),username);
           }
 
           logger.info(dbpojoPigSavedscript.get(i).getTitle() + "Migrated to Ambari");
@@ -182,8 +190,10 @@ public class PigScriptMigrationUtility {
           pigsavedscriptmigration.deletePigScriptLocalFile(ConfigurationCheckImplementation.getHomeDir(),
pigscriptFilename);
 
         }
+        pigsavedscriptmigration.updateSequenceno(connectionAmbaridb, maxcountforpigsavedscript,
pigInstanceTableName, ambaridatabase);
         connectionAmbaridb.commit();
 
+
       }
 
 
@@ -201,6 +211,8 @@ public class PigScriptMigrationUtility {
       logger.error("ParseException: ", e);
     } catch (PropertyVetoException e) {
       logger.error("PropertyVetoException: ", e);
+    } catch (URISyntaxException e) {
+      e.printStackTrace();
     } finally {
       if (null != connectionAmbaridb)
         try {

http://git-wip-us.apache.org/repos/asf/ambari/blob/7a34fe1d/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/MigrationModel.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/MigrationModel.java
b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/MigrationModel.java
index f765e15..b341474 100644
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/MigrationModel.java
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/MigrationModel.java
@@ -34,25 +34,25 @@ public class MigrationModel implements Serializable,PersonalResource{
   private String intanceName="";
   private String userNameofhue="";
   private int totalNoQuery;
-  private String ProgressPercentage="";
+  private String progressPercentage="";
   private String owner = "";
-  private Boolean IfSuccess;
-  private String TimeTakentotransfer="";
+  private Boolean ifSuccess;
+  private String timeTakentotransfer="";
 
   public String getTimeTakentotransfer() {
-    return TimeTakentotransfer;
+    return timeTakentotransfer;
   }
 
   public void setTimeTakentotransfer(String timeTakentotransfer) {
-    TimeTakentotransfer = timeTakentotransfer;
+    timeTakentotransfer = timeTakentotransfer;
   }
 
   public Boolean getIfSuccess() {
-    return IfSuccess;
+    return ifSuccess;
   }
 
   public void setIfSuccess(Boolean ifSuccess) {
-    IfSuccess = ifSuccess;
+    ifSuccess = ifSuccess;
   }
 
   public MigrationModel(Map<String, Object> stringObjectMap) throws InvocationTargetException,
IllegalAccessException {
@@ -113,11 +113,11 @@ public class MigrationModel implements Serializable,PersonalResource{
   }
 
   public String getProgressPercentage() {
-    return ProgressPercentage;
+    return progressPercentage;
   }
 
   public void setProgressPercentage(String progressPercentage) {
-    ProgressPercentage = progressPercentage;
+    progressPercentage = progressPercentage;
   }
 
   public String getOwner() {

http://git-wip-us.apache.org/repos/asf/ambari/blob/7a34fe1d/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/checkprogress.js
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/checkprogress.js
b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/checkprogress.js
index 1f4d2c6..e6e466f 100644
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/checkprogress.js
+++ b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/checkprogress.js
@@ -27,4 +27,4 @@ export default Model.extend({
   totalTimeTaken: DS.attr('string'),
   jobtype: DS.attr('string'),
   isNoQuerySelected: DS.attr('string')
-});
\ No newline at end of file
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/7a34fe1d/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/returnjobid.js
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/returnjobid.js
b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/returnjobid.js
index 5d55773..84c30c7 100644
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/returnjobid.js
+++ b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/returnjobid.js
@@ -20,4 +20,4 @@ import DS from 'ember-data';
 
 export default Model.extend({
   idforJob: DS.attr('string')
-});
\ No newline at end of file
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/7a34fe1d/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/startmigration.js
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/startmigration.js
b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/startmigration.js
index 5913726..f2f57b4 100644
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/startmigration.js
+++ b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/startmigration.js
@@ -20,4 +20,4 @@ import DS from 'ember-data';
 
 export default Model.extend({
   progressPercentage: DS.attr('string')
-});
\ No newline at end of file
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/7a34fe1d/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page/hive-history.js
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page/hive-history.js
b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page/hive-history.js
index a47b93f..158a154 100644
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page/hive-history.js
+++ b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page/hive-history.js
@@ -17,6 +17,8 @@
  */
 import Ember from 'ember';
 
+
+
 export default Ember.Route.extend({
 
   model: function() {
@@ -31,6 +33,11 @@ export default Ember.Route.extend({
 
   actions: {
     submitResult: function() {
+
+     if(this.controller.get('usernamehue')===undefined || this.controller.get('instancename')
===undefined){
+        alert("Mandatory fields cannot be left blank");
+     }
+     else{
       this.controller.set('jobstatus', null);
       this.controller.set('progressBar', null);
       this.controller.set('completionStatus', null);
@@ -61,6 +68,7 @@ export default Ember.Route.extend({
           repeat.progresscheck(jobid);
         });
       });
+      }
     }
   },
   progresscheck: function(jobid) {
@@ -80,6 +88,7 @@ export default Ember.Route.extend({
         var userNameofhue = progress.get('userNameofhue');
         var totalTimeTaken = progress.get('totalTimeTaken');
         var isNoQuerySelected = progress.get('isNoQuerySelected');
+        console.log("the progress percentage is="+progressPercentage);
 
         if (progressPercentage !== '100' && isNoQuerySelected === 'no') {
           control.set('progressBar', progressPercentage);
@@ -103,4 +112,4 @@ export default Ember.Route.extend({
       });
     }, 500);
   }
-});
\ No newline at end of file
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/7a34fe1d/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page/hive-saved-query.js
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page/hive-saved-query.js
b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page/hive-saved-query.js
index d50a402..2006e8d 100644
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page/hive-saved-query.js
+++ b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page/hive-saved-query.js
@@ -31,6 +31,11 @@ export default Ember.Route.extend({
 
   actions: {
     submitResult: function() {
+
+    if(this.controller.get('usernamehue')===undefined || this.controller.get('instancename')
===undefined){
+      alert("Mandatory fields cannot be left blank");
+    }
+    else{
       this.controller.set('jobstatus', null);
       this.controller.set('progressBar', null);
       this.controller.set('completionStatus', null);
@@ -61,6 +66,7 @@ export default Ember.Route.extend({
         });
       });
     }
+   }
   },
   progresscheck: function(jobid) {
     var repeat = this;
@@ -99,4 +105,4 @@ export default Ember.Route.extend({
       });
     }, 500);
   }
-});
\ No newline at end of file
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/7a34fe1d/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page/pig-job.js
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page/pig-job.js
b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page/pig-job.js
index debb1eb..d2dfd39 100644
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page/pig-job.js
+++ b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page/pig-job.js
@@ -31,6 +31,11 @@ export default Ember.Route.extend({
 
   actions: {
     submitResult: function() {
+
+    if(this.controller.get('usernamehue')===undefined || this.controller.get('instancename')
===undefined){
+      alert("Mandatory fields cannot be left blank");
+    }
+    else{
       this.controller.set('jobstatus', null);
       this.controller.set('progressBar', null);
       this.controller.set('completionStatus', null);
@@ -61,6 +66,7 @@ export default Ember.Route.extend({
         });
       });
     }
+   }
   },
   progresscheck: function(jobid) {
     var repeat = this;
@@ -99,4 +105,4 @@ export default Ember.Route.extend({
       });
     }, 500);
   }
-});
\ No newline at end of file
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/7a34fe1d/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page/pig-script.js
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page/pig-script.js
b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page/pig-script.js
index ab0b919..c121f63 100644
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page/pig-script.js
+++ b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page/pig-script.js
@@ -31,6 +31,11 @@ export default Ember.Route.extend({
 
   actions: {
     submitResult: function() {
+    if(this.controller.get('usernamehue')===undefined || this.controller.get('instancename')
===undefined){
+      alert("Mandatory fields cannot be left blank");
+    }
+    else
+    {
       this.controller.set('jobstatus', null);
       this.controller.set('progressBar', null);
       this.controller.set('completionStatus', null);
@@ -61,6 +66,7 @@ export default Ember.Route.extend({
         });
       });
     }
+    }
   },
   progresscheck: function(jobid) {
     var repeat = this;
@@ -100,4 +106,4 @@ export default Ember.Route.extend({
       });
     }, 500);
   }
-});
\ No newline at end of file
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/7a34fe1d/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page/revert-change.js
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page/revert-change.js
b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page/revert-change.js
index b34b5ed..06c7542 100644
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page/revert-change.js
+++ b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page/revert-change.js
@@ -29,6 +29,11 @@ export default Ember.Route.extend({
 
   actions: {
     submitResult: function() {
+
+    if(this.controller.get('instancename') ===undefined){
+      alert("Mandatory fields cannot be left blank");
+    }
+    else{
       this.controller.set('jobstatus', null);
       this.controller.set('progressBar', null);
       this.controller.set('completionStatus', null);
@@ -46,11 +51,12 @@ export default Ember.Route.extend({
           revertdate: control.get('revertdate'),
           jobid: jobid
         });
-        hivehistoryqueryjobstart.then(function() {  
+        hivehistoryqueryjobstart.then(function() {
           control.set('jobstatus', "0");
           repeat.progresscheck(jobid);
         });
       });
+     }
     }
   },
 
@@ -94,4 +100,4 @@ export default Ember.Route.extend({
     }, 500);
   }
 
-});
\ No newline at end of file
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/7a34fe1d/contrib/views/hueambarimigration/src/main/resources/view.xml
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/view.xml b/contrib/views/hueambarimigration/src/main/resources/view.xml
index dcfa943..3abbd72 100644
--- a/contrib/views/hueambarimigration/src/main/resources/view.xml
+++ b/contrib/views/hueambarimigration/src/main/resources/view.xml
@@ -122,10 +122,17 @@ limitations under the License. Kerberos, LDAP, Custom. Binary/Htt
   <parameter>
     <name>KerberoseEnabled</name>
     <description>Kerberose enable</description>
-    <label>Kerberose enabled on Ambari cluster?(y/n)</label>
+    <label>Kerberos enabled on Ambari cluster?(y/n)</label>
     <placeholder>y/n</placeholder>
   </parameter>
 
+  <parameter>
+    <name>PrincipalUserName</name>
+    <description>Kerberos principal user name</description>
+    <label>Principal name (if Kerberos is enabled)</label>
+    <placeholder>Please enter the principal name if kerberos is enabled</placeholder>
+  </parameter>
+
+
 
   <resource>
     <name>ambaridatabases</name>


Mime
View raw message