ambari-commits mailing list archives

From: dbhowm...@apache.org
Subject: [04/10] ambari git commit: AMBARI-17079. Moved Hue to Ambari migrator to standard view architecture (Pradarttana Panda via dipayanb)
Date: Thu, 30 Jun 2016 06:55:34 GMT
http://git-wip-us.apache.org/repos/asf/ambari/blob/f6e8637c/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/InstanceModel.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/InstanceModel.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/InstanceModel.java
new file mode 100644
index 0000000..d7f2868
--- /dev/null
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/InstanceModel.java
@@ -0,0 +1,41 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.huetoambarimigration.resources.scripts.models;
+
+
+public class InstanceModel {
+
+  private String instanceName;
+  private int id;
+
+  public String getInstanceName() {
+    return instanceName;
+  }
+
+  public void setInstanceName(String instanceName) {
+    this.instanceName = instanceName;
+  }
+
+  public int getId() {
+    return id;
+  }
+
+  public void setId(int id) {
+    this.id = id;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/f6e8637c/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/JobReturnIdModel.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/JobReturnIdModel.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/JobReturnIdModel.java
new file mode 100644
index 0000000..1a247bb
--- /dev/null
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/JobReturnIdModel.java
@@ -0,0 +1,43 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.huetoambarimigration.resources.scripts.models;
+
+/**
+ * Model wrapping the id handed back when a migration job is created.
+ */
+public class JobReturnIdModel {
+
+  private int id;
+  private String idforJob;
+
+  public String getIdforJob() {
+    return idforJob;
+  }
+
+  public void setIdforJob(String idforJob) {
+    this.idforJob = idforJob;
+  }
+
+  public int getId() {
+    return id;
+  }
+
+  public void setId(int id) {
+    this.id = id;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/f6e8637c/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/MigrationModel.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/MigrationModel.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/MigrationModel.java
new file mode 100644
index 0000000..f765e15
--- /dev/null
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/MigrationModel.java
@@ -0,0 +1,130 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.huetoambarimigration.resources.scripts.models;
+
+import org.apache.ambari.view.huetoambarimigration.persistence.utils.PersonalResource;
+import org.apache.commons.beanutils.BeanUtils;
+
+import java.io.Serializable;
+import java.lang.reflect.InvocationTargetException;
+import java.util.Map;
+
+public class MigrationModel implements Serializable, PersonalResource {
+
+
+
+  private String id;
+
+  private int numberOfQueryTransfered;
+  private String intanceName = "";
+  private String userNameofhue = "";
+  private int totalNoQuery;
+  private String progressPercentage = "";
+  private String owner = "";
+  private Boolean ifSuccess;
+  private String timeTakenToTransfer = "";
+
+  public String getTimeTakentotransfer() {
+    return timeTakenToTransfer;
+  }
+
+  public void setTimeTakentotransfer(String timeTakenToTransfer) {
+    this.timeTakenToTransfer = timeTakenToTransfer;
+  }
+
+  public Boolean getIfSuccess() {
+    return ifSuccess;
+  }
+
+  public void setIfSuccess(Boolean ifSuccess) {
+    this.ifSuccess = ifSuccess;
+  }
+
+  public MigrationModel(Map<String, Object> stringObjectMap) throws InvocationTargetException, IllegalAccessException {
+    BeanUtils.populate(this, stringObjectMap);
+  }
+
+  public MigrationModel() {
+
+  }
+
+
+  public String getId() {
+    return id;
+  }
+
+  public void setId(String id) {
+    this.id = id;
+  }
+
+  @Override
+  public int hashCode() {
+    return id == null ? 0 : id.hashCode();
+  }
+
+
+
+
+  public String getIntanceName() {
+    return intanceName;
+  }
+
+  public void setIntanceName(String intanceName) {
+    this.intanceName = intanceName;
+  }
+
+  public String getUserNameofhue() {
+    return userNameofhue;
+  }
+
+  public void setUserNameofhue(String userNameofhue) {
+    this.userNameofhue = userNameofhue;
+  }
+
+  public int getNumberOfQueryTransfered() {
+    return numberOfQueryTransfered;
+  }
+
+  public void setNumberOfQueryTransfered(int numberOfQueryTransfered) {
+    this.numberOfQueryTransfered = numberOfQueryTransfered;
+  }
+
+  public int getTotalNoQuery() {
+    return totalNoQuery;
+  }
+
+  public void setTotalNoQuery(int totalNoQuery) {
+    this.totalNoQuery = totalNoQuery;
+  }
+
+  public String getProgressPercentage() {
+    return progressPercentage;
+  }
+
+  public void setProgressPercentage(String progressPercentage) {
+    this.progressPercentage = progressPercentage;
+  }
+
+  public String getOwner() {
+    return owner;
+  }
+
+  public void setOwner(String owner) {
+    this.owner = owner;
+  }
+}
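
The Map-based constructor above delegates to BeanUtils.populate(), which matches map
keys against the bean's property names (derived from the getter/setter pairs, typos
included) and applies standard type conversions. A minimal usage sketch, assuming
commons-beanutils and this model are on the classpath; the demo class name and the
key/value pairs are illustrative:

  import java.util.HashMap;
  import java.util.Map;

  public class MigrationModelDemo {  // hypothetical demo class, not part of this patch
    public static void main(String[] args) throws Exception {
      Map<String, Object> row = new HashMap<String, Object>();
      row.put("id", "42");
      row.put("intanceName", "hive-view-1");  // property name comes from getIntanceName()
      row.put("totalNoQuery", "10");          // BeanUtils converts the String to int
      MigrationModel model = new MigrationModel(row);
      System.out.println(model.getIntanceName() + ": " + model.getTotalNoQuery());
    }
  }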

http://git-wip-us.apache.org/repos/asf/ambari/blob/f6e8637c/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/MigrationResponse.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/MigrationResponse.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/MigrationResponse.java
new file mode 100644
index 0000000..d3cd5b9
--- /dev/null
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/MigrationResponse.java
@@ -0,0 +1,137 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.huetoambarimigration.resources.scripts.models;
+
+import org.apache.ambari.view.huetoambarimigration.persistence.utils.PersonalResource;
+import org.apache.commons.beanutils.BeanUtils;
+
+import java.io.Serializable;
+import java.lang.reflect.InvocationTargetException;
+import java.util.Map;
+
+public class MigrationResponse implements Serializable, PersonalResource {
+
+
+
+  private String id;
+
+  private int numberOfQueryTransfered;
+  private String intanceName = "";
+  private String userNameofhue = "";
+  private int totalNoQuery;
+  private int progressPercentage;
+  private String owner = "";
+  private String totalTimeTaken = "";
+  private String jobtype = "";
+  private String isNoQuerySelected = "";
+
+  public String getTotalTimeTaken() {
+    return totalTimeTaken;
+  }
+
+  public void setTotalTimeTaken(String totalTimeTaken) {
+    this.totalTimeTaken = totalTimeTaken;
+  }
+
+  public String getIsNoQuerySelected() {
+    return isNoQuerySelected;
+  }
+
+  public void setIsNoQuerySelected(String isNoQuerySelected) {
+    this.isNoQuerySelected = isNoQuerySelected;
+  }
+
+  public String getJobtype() {
+    return jobtype;
+  }
+
+  public void setJobtype(String jobtype) {
+    this.jobtype = jobtype;
+  }
+
+  public MigrationResponse(Map<String, Object> stringObjectMap) throws InvocationTargetException, IllegalAccessException {
+    BeanUtils.populate(this, stringObjectMap);
+  }
+
+  public MigrationResponse() {
+
+  }
+
+
+  public int getTotalNoQuery() {
+    return totalNoQuery;
+  }
+
+  public void setTotalNoQuery(int totalNoQuery) {
+    this.totalNoQuery = totalNoQuery;
+  }
+
+  public String getId() {
+    return id;
+  }
+
+  public void setId(String id) {
+    this.id = id;
+  }
+
+  @Override
+  public int hashCode() {
+    return id == null ? 0 : id.hashCode();
+  }
+
+  public int getNumberOfQueryTransfered() {
+    return numberOfQueryTransfered;
+  }
+
+  public void setNumberOfQueryTransfered(int numberOfQueryTransfered) {
+    this.numberOfQueryTransfered = numberOfQueryTransfered;
+  }
+
+  public String getIntanceName() {
+    return intanceName;
+  }
+
+  public void setIntanceName(String intanceName) {
+    this.intanceName = intanceName;
+  }
+
+  public String getUserNameofhue() {
+    return userNameofhue;
+  }
+
+  public void setUserNameofhue(String userNameofhue) {
+    this.userNameofhue = userNameofhue;
+  }
+
+
+  public int getProgressPercentage() {
+    return progressPercentage;
+  }
+
+  public void setProgressPercentage(int progressPercentage) {
+    this.progressPercentage = progressPercentage;
+  }
+
+  public String getOwner() {
+    return owner;
+  }
+
+  public void setOwner(String owner) {
+    this.owner = owner;
+  }
+}
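
Both MigrationModel and MigrationResponse override hashCode() on id without a matching
equals(), so two instances with equal ids still would not compare equal in collections.
A sketch of a contract-preserving equals() for MigrationResponse, should instances ever
be used as map keys or compared (not part of this patch):

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (!(o instanceof MigrationResponse)) {
      return false;
    }
    MigrationResponse other = (MigrationResponse) o;
    return id == null ? other.id == null : id.equals(other.id);
  }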

http://git-wip-us.apache.org/repos/asf/ambari/blob/f6e8637c/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/PigModel.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/PigModel.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/PigModel.java
new file mode 100644
index 0000000..5bf6499
--- /dev/null
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/PigModel.java
@@ -0,0 +1,72 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.huetoambarimigration.resources.scripts.models;
+
+import java.util.Date;
+
+public class PigModel {
+
+  private Date dt;
+  private String script;
+  private String status;
+  private String title;
+  private String dir;
+
+  public Date getDt() {
+    return dt;
+  }
+
+  public void setDt(Date dt) {
+    this.dt = dt;
+  }
+
+  public String getScript() {
+    return script;
+  }
+
+  public void setScript(String script) {
+    this.script = script;
+  }
+
+  public String getStatus() {
+    return status;
+  }
+
+  public void setStatus(String status) {
+    this.status = status;
+  }
+
+  public String getTitle() {
+    return title;
+  }
+
+  public void setTitle(String title) {
+    this.title = title;
+  }
+
+  public String getDir() {
+    return dir;
+  }
+
+  public void setDir(String dir) {
+    this.dir = dir;
+  }
+
+
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/f6e8637c/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/ProgressCheckModel.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/ProgressCheckModel.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/ProgressCheckModel.java
new file mode 100644
index 0000000..0c66e17
--- /dev/null
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/ProgressCheckModel.java
@@ -0,0 +1,60 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.huetoambarimigration.resources.scripts.models;
+
+
+public class ProgressCheckModel {
+
+  private int progressPercentage;
+  private int noOfQueryCompleted;
+  private int noOfQueryLeft;
+  private int totalNoOfQuery;
+
+  public int getProgressPercentage() {
+    return progressPercentage;
+  }
+
+  public void setProgressPercentage(int progressPercentage) {
+    this.progressPercentage = progressPercentage;
+  }
+
+  public int getNoOfQueryCompleted() {
+    return noOfQueryCompleted;
+  }
+
+  public void setNoOfQueryCompleted(int noOfQueryCompleted) {
+    this.noOfQueryCompleted = noOfQueryCompleted;
+  }
+
+  public int getNoOfQueryLeft() {
+    return noOfQueryLeft;
+  }
+
+  public void setNoOfQueryLeft(int noOfQueryLeft) {
+    this.noOfQueryLeft = noOfQueryLeft;
+  }
+
+  public int getTotalNoOfQuery() {
+    return totalNoOfQuery;
+  }
+
+  public void setTotalNoOfQuery(int totalNoOfQuery) {
+    this.totalNoOfQuery = totalNoOfQuery;
+  }
+}
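
The four counters in ProgressCheckModel are related (completed + left = total, and the
percentage derives from the first and last). A sketch of a factory that keeps them
consistent; the helper name is hypothetical and the semantics are inferred from the
field names:

  public static ProgressCheckModel progressOf(int completed, int total) {
    ProgressCheckModel p = new ProgressCheckModel();
    p.setTotalNoOfQuery(total);
    p.setNoOfQueryCompleted(completed);
    p.setNoOfQueryLeft(total - completed);
    p.setProgressPercentage(total == 0 ? 0 : (completed * 100) / total);
    return p;
  }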

http://git-wip-us.apache.org/repos/asf/ambari/blob/f6e8637c/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/UserModel.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/UserModel.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/UserModel.java
new file mode 100644
index 0000000..6ba651c
--- /dev/null
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/UserModel.java
@@ -0,0 +1,42 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.huetoambarimigration.resources.scripts.models;
+
+
+
+public class UserModel {
+
+  private String username;
+  private int id;
+
+  public String getUsername() {
+    return username;
+  }
+
+  public void setUsername(String username) {
+    this.username = username;
+  }
+
+  public int getId() {
+    return id;
+  }
+
+  public void setId(int id) {
+    this.id = id;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/f6e8637c/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/service/configurationcheck/ConfFileReader.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/service/configurationcheck/ConfFileReader.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/service/configurationcheck/ConfFileReader.java
deleted file mode 100644
index ac76e1c..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/service/configurationcheck/ConfFileReader.java
+++ /dev/null
@@ -1,199 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.huetoambarimigration.service.configurationcheck;
-
-import java.io.BufferedReader;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.FileReader;
-import java.io.IOException;
-import java.io.InputStream;
-import java.net.*;
-import java.sql.Connection;
-import java.sql.DriverManager;
-import java.util.Properties;
-import javax.ws.rs.core.Context;
-
-import org.apache.ambari.view.ViewContext;
-import org.apache.ambari.view.AmbariStreamProvider;
-import org.apache.ambari.view.URLStreamProvider;
-
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.hdfs.DistributedFileSystem;
-
-import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase;
-import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceHueDatabase;
-
-import org.apache.ambari.view.huetoambarimigration.model.*;
-import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
-import org.apache.log4j.Logger;
-
-public class ConfFileReader {
-
-  static final Logger logger = Logger.getLogger(ConfFileReader.class);
-
-  private static String homeDir = System.getProperty("java.io.tmpdir")+"/";
-
-  public static boolean checkConfigurationForHue(String hueURL) {
-
-    URL url = null;
-    int resonseCode = 0;
-    try {
-      url = new URL(hueURL);
-      HttpURLConnection connection = (HttpURLConnection) url.openConnection();
-      connection.setRequestMethod("GET");  //OR  huc.setRequestMethod ("HEAD");
-      connection.connect();
-      resonseCode = connection.getResponseCode();
-
-
-    } catch (MalformedURLException e) {
-
-      logger.error("Error in accessing the URL:" , e);
-
-    } catch (ProtocolException e) {
-
-      logger.error("Error in protocol: ", e);
-    } catch (IOException e) {
-
-      logger.error("IO Exception while establishing connection:",e);
-    }
-
-    return resonseCode == 200 ;
-  }
-
-  public static boolean checkConfigurationForAmbari(String ambariURL) {
-
-
-    URL url = null;
-    int responseCode = 0;
-    try {
-      url = new URL(ambariURL);
-      HttpURLConnection connection = (HttpURLConnection) url.openConnection();
-      connection.setRequestMethod("GET");  //OR  huc.setRequestMethod ("HEAD");
-      connection.connect();
-      responseCode = connection.getResponseCode();
-
-    } catch (MalformedURLException e) {
-      logger.error("Error in accessing the URL: " , e);
-
-    } catch (ProtocolException e) {
-      logger.error("Error in protocol: ", e);
-    } catch (IOException e) {
-      logger.error("IO Exception while establishing connection: ",e);
-    }
-    return responseCode == 200 ;
-
-
-  }
-
-  public static boolean checkHueDatabaseConnection(String hueDBDRiver, String hueJdbcUrl, String huedbUsername, String huedbPassword) throws IOException {
-
-    try {
-      Connection con = DataSourceHueDatabase.getInstance(hueDBDRiver, hueJdbcUrl, huedbUsername, huedbPassword).getConnection();
-    }
-    catch (Exception e) {
-
-      logger.error("Sql exception in acessing Hue Database: " ,e);
-      return false;
-    }
-
-    return true;
-
-  }
-
-  public static boolean checkAmbariDatbaseConection(String ambariDBDriver, String ambariDBJdbcUrl, String ambariDbUsername, String ambariDbPassword) throws IOException {
-
-
-    try {
-
-      Connection con = DataSourceAmbariDatabase.getInstance(ambariDBDriver, ambariDBJdbcUrl, ambariDbUsername, ambariDbPassword).getConnection();
-
-
-    } catch (Exception e) {
-
-      logger.error("Sql exception in acessing Ambari Database: " ,e);
-
-      return false;
-    }
-
-    return true;
-
-  }
-
-  public static String getHomeDir() {
-    return homeDir;
-  }
-
-  public static void setHomeDir(String homeDir) {
-    ConfFileReader.homeDir = homeDir;
-  }
-
-  public static boolean checkNamenodeURIConnectionforambari(String ambariServerNameNode) throws IOException, URISyntaxException {
-
-
-    Configuration conf = new Configuration();
-    conf.set("fs.hdfs.impl",
-      org.apache.hadoop.hdfs.DistributedFileSystem.class.getName());
-    conf.set("fs.file.impl",
-      org.apache.hadoop.fs.LocalFileSystem.class.getName()
-    );
-
-    FileSystem fileSystem = FileSystem.get(new URI(ambariServerNameNode), conf);
-
-
-    if (fileSystem instanceof WebHdfsFileSystem) {
-
-      return true;
-
-    } else {
-
-      return false;
-    }
-
-
-  }
-
-  public static boolean checkNamenodeURIConnectionforHue(String hueServerNamenodeURI) throws IOException, URISyntaxException {
-
-    Configuration conf = new Configuration();
-    conf.set("fs.hdfs.impl",
-      org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()
-    );
-    conf.set("fs.file.impl",
-      org.apache.hadoop.fs.LocalFileSystem.class.getName()
-    );
-
-    FileSystem fileSystem = FileSystem.get(new URI(hueServerNamenodeURI), conf);
-
-
-    if (fileSystem instanceof WebHdfsFileSystem) {
-
-      return true;
-    } else {
-
-      return false;
-    }
-
-
-  }
-
-
-}
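
For reference, the reachability probes in the removed checkConfigurationForHue() and
checkConfigurationForAmbari() both reduce to "open an HTTP connection and expect 200".
A standalone sketch of that check (helper name hypothetical; MalformedURLException and
ProtocolException are both IOException subtypes, so one catch covers all three failure
paths the old code logged separately):

  import java.io.IOException;
  import java.net.HttpURLConnection;
  import java.net.URL;

  static boolean isReachable(String address) {
    try {
      HttpURLConnection connection = (HttpURLConnection) new URL(address).openConnection();
      connection.setRequestMethod("GET");
      connection.connect();
      return connection.getResponseCode() == HttpURLConnection.HTTP_OK;
    } catch (IOException e) {
      return false;  // malformed URL, protocol, and I/O failures all read as unreachable
    }
  }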

http://git-wip-us.apache.org/repos/asf/ambari/blob/f6e8637c/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/service/hive/HiveHistoryQueryImpl.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/service/hive/HiveHistoryQueryImpl.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/service/hive/HiveHistoryQueryImpl.java
deleted file mode 100644
index c959e8a..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/service/hive/HiveHistoryQueryImpl.java
+++ /dev/null
@@ -1,562 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.huetoambarimigration.service.hive;
-
-import java.net.URISyntaxException;
-import java.security.PrivilegedExceptionAction;
-import java.sql.Connection;
-import java.sql.DriverManager;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.io.BufferedInputStream;
-import java.io.BufferedWriter;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.Calendar;
-import java.util.Date;
-import java.util.GregorianCalendar;
-import java.util.Scanner;
-
-import org.apache.ambari.view.huetoambarimigration.service.configurationcheck.ConfFileReader;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
-import org.apache.log4j.Logger;
-import org.jdom.Attribute;
-import org.jdom.Document;
-import org.jdom.Element;
-import org.jdom.JDOMException;
-import org.jdom.input.SAXBuilder;
-import org.jdom.output.Format;
-import org.jdom.output.XMLOutputter;
-
-public class HiveHistoryQueryImpl {
-
-  static final Logger logger = Logger.getLogger(HiveHistoryQueryImpl.class);
-
-  public void wrtitetoalternatesqlfile(String dirname, String content, String instance, int i) throws IOException {
-
-    Date dNow = new Date();
-    SimpleDateFormat ft = new SimpleDateFormat("YYYY-MM-dd hh:mm:ss");
-    String currentDate = ft.format(dNow);
-
-    XMLOutputter xmlOutput = new XMLOutputter();
-    xmlOutput.setFormat(Format.getPrettyFormat());
-
-    File xmlfile = new File("/var/lib/huetoambari/RevertChange.xml");
-
-    if (xmlfile.exists()) {
-      String iteration = Integer.toString(i + 1);
-      SAXBuilder builder = new SAXBuilder();
-      Document doc;
-      try {
-        doc = (Document) builder.build(xmlfile);
-        Element rootNode = doc.getRootElement();
-        Element record = new Element("RevertRecord");
-        record.setAttribute(new Attribute("id", iteration));
-        record.addContent(new Element("datetime").setText(currentDate.toString()));
-        record.addContent(new Element("dirname").setText(dirname));
-        record.addContent(new Element("instance").setText(instance));
-        record.addContent(new Element("query").setText(content));
-        rootNode.addContent(record);
-        xmlOutput.output(doc, new FileWriter(ConfFileReader.getHomeDir() + "RevertChange.xml"));
-
-      } catch (JDOMException e) {
-        logger.error("JDOMException" ,e);
-
-      }
-
-    } else {
-
-      try {
-        String iteration = Integer.toString(i + 1);
-        Element revertrecord = new Element("RevertChangePage");
-        Document doc = new Document(revertrecord);
-        doc.setRootElement(revertrecord);
-
-        Element record = new Element("RevertRecord");
-        record.setAttribute(new Attribute("id", iteration));
-        record.addContent(new Element("datetime").setText(currentDate.toString()));
-        record.addContent(new Element("dirname").setText(dirname));
-        record.addContent(new Element("instance").setText(instance));
-        record.addContent(new Element("query").setText(content));
-        doc.getRootElement().addContent(record);
-        xmlOutput.output(doc, new FileWriter(ConfFileReader.getHomeDir() + "RevertChange.xml"));
-      } catch (IOException io) {
-        logger.error("JDOMException" , io);
-      }
-
-    }
-
-  }
-
-  public int fetchMaximumIdfromAmbaridb(String driverName, Connection c, int id) throws SQLException {
-
-    String ds_id = null;
-    Statement stmt = null;
-    stmt = c.createStatement();
-    ResultSet rs = null;
-
-    if (driverName.contains("postgresql")) {
-      rs = stmt.executeQuery("select MAX(cast(ds_id as integer)) as max from ds_jobimpl_" + id + ";");
-    } else if (driverName.contains("mysql")) {
-      rs = stmt.executeQuery("select max( cast(ds_id as unsigned) ) as max from DS_JOBIMPL_" + id + ";");
-    } else if (driverName.contains("oracle")) {
-      rs = stmt.executeQuery("select MAX(cast(ds_id as integer)) as max from ds_jobimpl_" + id);
-    }
-
-    while (rs.next()) {
-      ds_id = rs.getString("max");
-    }
-
-    int num;
-    if (ds_id == null) {
-      num = 1;
-    } else {
-      num = Integer.parseInt(ds_id);
-    }
-    return num;
-  }
-
-  public void insertRowinAmbaridb(String driverName, String dirname, int maxcount, long epochtime, Connection c, int id, String instance, int i) throws SQLException, IOException {
-
-    String maxcount1 = Integer.toString(maxcount);
-    String epochtime1 = Long.toString(epochtime);
-    String ds_id = new String();
-    Statement stmt = null;
-    String sql = "";
-    String revsql = "";
-    stmt = c.createStatement();
-
-    if (driverName.contains("mysql")) {
-      sql = "INSERT INTO DS_JOBIMPL_" + id + " values ('" + maxcount1
-        + "','','','','','default'," + epochtime1 + ",0,'','','"
-        + dirname + "logs','admin','" + dirname
-        + "query.hql','','job','','','Unknown','" + dirname
-        + "','','Worksheet');";
-      revsql = "delete from  DS_JOBIMPL_" + id + " where ds_id='" + maxcount1 + "';";
-
-    } else if (driverName.contains("postgresql")) {
-      sql = "INSERT INTO ds_jobimpl_" + id + " values ('" + maxcount1
-        + "','','','','','default'," + epochtime1 + ",0,'','','"
-        + dirname + "logs','admin','" + dirname
-        + "query.hql','','job','','','Unknown','" + dirname
-        + "','','Worksheet');";
-      revsql = "delete from  ds_jobimpl_" + id + " where ds_id='" + maxcount1 + "';";
-
-    } else if (driverName.contains("oracle")) {
-      sql = "INSERT INTO ds_jobimpl_" + id + " values ('" + maxcount1
-        + "','','','','','default'," + epochtime1 + ",0,'','','"
-        + dirname + "logs','admin','" + dirname
-        + "query.hql','','job','','','Unknown','" + dirname
-        + "','','Worksheet')";
-      revsql = "delete from  ds_jobimpl_" + id + " where ds_id='" + maxcount1 + "'";
-
-    }
-    wrtitetoalternatesqlfile(dirname, revsql, instance, i);
-
-    stmt.executeUpdate(sql);
-
-  }
-
-  public int fetchInstanceTablename(String driverName, Connection c, String instance) throws SQLException {
-
-    String ds_id = new String();
-    int id = 0;
-    Statement stmt = null;
-    stmt = c.createStatement();
-    ResultSet rs = null;
-
-    if (driverName.contains("oracle")) {
-      rs = stmt.executeQuery("select id from viewentity where class_name LIKE 'org.apache.ambari.view.hive.resources.jobs.viewJobs.JobImpl' and view_instance_name='" + instance + "'");
-    } else {
-      rs = stmt.executeQuery("select id from viewentity where class_name LIKE 'org.apache.ambari.view.hive.resources.jobs.viewJobs.JobImpl' and view_instance_name='" + instance + "';");
-    }
-
-    while (rs.next()) {
-      id = rs.getInt("id");
-    }
-    return id;
-  }
-
-  public long getEpochTime() throws ParseException {
-    long seconds = System.currentTimeMillis() / 1000l;
-    return seconds;
-
-  }
-
-  public String getTime() throws ParseException {
-    int day, month, year;
-    int second, minute, hour;
-    int milisecond;
-    GregorianCalendar date = new GregorianCalendar();
-
-    day = date.get(Calendar.DAY_OF_MONTH);
-    month = date.get(Calendar.MONTH);
-    year = date.get(Calendar.YEAR);
-
-    second = date.get(Calendar.SECOND);
-    minute = date.get(Calendar.MINUTE);
-    hour = date.get(Calendar.HOUR);
-    milisecond = date.get(Calendar.MILLISECOND);
-
-    String s = year + "-" + (month + 1) + "-" + day + "_" + hour + "-"
-      + minute;
-    String s1 = year + "-" + (month + 1) + "-" + day + "_" + hour + "-"
-      + minute + "-" + second + "-" + milisecond;
-    SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd_HH-mm-ss-SSS");
-    Date date1 = df.parse(s1);
-    long epoch = date1.getTime();
-    return s;
-
-  }
-
-  public String[] fetchFromHue(String username, String startdate, String endtime, Connection connection) throws ClassNotFoundException, SQLException {
-    int id = 0;
-    int i = 0;
-    String[] query = new String[100];
-
-    try {
-      connection.setAutoCommit(false);
-      Statement statement = connection.createStatement();
-
-      ResultSet rs1 = null;
-      if (username.equals("all")) {
-      } else {
-        ResultSet rs = statement.executeQuery("select id from auth_user where username='" + username + "';");
-        while (rs.next()) {
-          id = rs.getInt("id");
-        }
-      }
-      if (startdate.equals("") && endtime.equals("")) {
-        if (username.equals("all")) {
-          rs1 = statement.executeQuery("select query from beeswax_queryhistory;");
-        } else {
-          rs1 = statement.executeQuery("select query from beeswax_queryhistory where owner_id =" + id + ";");
-        }
-
-      } else if (!(startdate.equals("")) && !(endtime.equals(""))) {
-        if (username.equals("all")) {
-          rs1 = statement.executeQuery("select query from beeswax_queryhistory where submission_date >= date('" + startdate + "') AND submission_date < date('" + endtime + "');");
-        } else {
-          rs1 = statement.executeQuery("select query from beeswax_queryhistory where owner_id =" + id + " AND submission_date >= date('" + startdate + "') AND submission_date <= date('" + endtime + "');");
-        }
-      } else if (!(startdate.equals("")) && (endtime.equals(""))) {
-        if (username.equals("all")) {
-          rs1 = statement.executeQuery("select query from beeswax_queryhistory where submission_date >= date('" + startdate + "');");
-        } else {
-          rs1 = statement.executeQuery("select query from beeswax_queryhistory where owner_id =" + id + " AND submission_date >= date('" + startdate + "');");
-        }
-
-      } else if ((startdate.equals("")) && !(endtime.equals(""))) {
-        if (username.equals("all")) {
-          rs1 = statement.executeQuery("select query from beeswax_queryhistory where submission_date < date('" + endtime + "');");
-        } else {
-          rs1 = statement.executeQuery("select query from beeswax_queryhistory where owner_id =" + id + " AND submission_date < date('" + endtime + "');");
-        }
-      }
-
-
-      while (rs1.next()) {
-        query[i] = rs1.getString("query");
-        i++;
-      }
-
-      connection.commit();
-
-    } catch (SQLException e) {
-      connection.rollback();
-
-    } finally {
-      try {
-        if (connection != null)
-          connection.close();
-      } catch (SQLException e) {
-        logger.error("Sql exception error: " + e);
-      }
-    }
-    return query;
-
-  }
-
-  public void writetoFileQueryhql(String content, String homedir) {
-    try {
-      File file = new File(homedir + "query.hql");
-      // if file doesnt exists, then create it
-      if (!file.exists()) {
-        file.createNewFile();
-      }
-      FileWriter fw = new FileWriter(file.getAbsoluteFile());
-      BufferedWriter bw = new BufferedWriter(fw);
-      bw.write(content);
-      bw.close();
-    } catch (IOException e) {
-      logger.error("IOException" , e);
-    }
-
-  }
-
-  public void deleteFileQueryhql(String homedir) {
-    try{
-      File file = new File(homedir + "query.hql");
-
-      if(file.delete()){
-        logger.info("temporary hql file deleted");
-      }else{
-        logger.info("temporary hql file delete failed");
-      }
-
-    }catch(Exception e){
-
-     logger.error("File Exception ",e);
-
-    }
-
-  }
-
-  public void deleteFileQueryLogs(String homedir) {
-    try{
-      File file = new File(homedir + "logs");
-
-      if(file.delete()){
-        logger.info("temporary logs file deleted");
-      }else{
-        logger.info("temporary logs file delete failed");
-      }
-
-    }catch(Exception e){
-
-      logger.error("File Exception ",e);
-
-    }
-
-  }
-
-  public void writetoFileLogs(String homedir) {
-    try {
-      String content = "";
-      File file = new File(homedir + "logs");
-      // if file doesnt exists, then create it
-      if (!file.exists()) {
-        file.createNewFile();
-      }
-      FileWriter fw = new FileWriter(file.getAbsoluteFile());
-      BufferedWriter bw = new BufferedWriter(fw);
-      bw.write(content);
-      bw.close();
-    } catch (IOException e) {
-      logger.error("IOException" , e);
-    }
-
-  }
-
-  public void createDir(final String dir, final String namenodeuri) throws IOException,
-    URISyntaxException {
-
-    try {
-      final Configuration conf = new Configuration();
-
-      conf.set("fs.hdfs.impl",
-        org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()
-      );
-      conf.set("fs.file.impl",
-        org.apache.hadoop.fs.LocalFileSystem.class.getName()
-      );
-      conf.set("fs.defaultFS", namenodeuri);
-      conf.set("hadoop.job.ugi", "hdfs");
-      UserGroupInformation.setConfiguration(conf);
-
-      UserGroupInformation ugi = UserGroupInformation.createRemoteUser("hdfs");
-
-      ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
-
-        public Boolean run() throws Exception {
-
-          FileSystem fs = FileSystem.get(conf);
-          Path src = new Path(dir);
-          Boolean b = fs.mkdirs(src);
-          return b;
-        }
-      });
-    } catch (Exception e) {
-      logger.error("Exception in Webhdfs" , e);
-    }
-  }
-
-  public void createDirKerberorisedSecured(final String dir, final String namenodeuri) throws IOException,
-    URISyntaxException {
-
-    try {
-      final Configuration conf = new Configuration();
-
-      conf.set("fs.hdfs.impl",
-        org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()
-      );
-      conf.set("fs.file.impl",
-        org.apache.hadoop.fs.LocalFileSystem.class.getName()
-      );
-      conf.set("fs.defaultFS", namenodeuri);
-      conf.set("hadoop.job.ugi", "hdfs");
-      conf.set("hadoop.security.authentication", "Kerberos");
-      UserGroupInformation.setConfiguration(conf);
-      UserGroupInformation ugi = UserGroupInformation.createRemoteUser("hdfs");
-      ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
-
-        public Boolean run() throws Exception {
-          FileSystem fs = FileSystem.get(conf);
-          Path src = new Path(dir);
-          Boolean b = fs.mkdirs(src);
-          return b;
-        }
-      });
-    } catch (Exception e) {
-      logger.error("Exception in Webhdfs" , e);
-    }
-  }
-
-
-  public void putFileinHdfs(final String source, final String dest, final String namenodeuri)
-    throws IOException {
-
-    try {
-      UserGroupInformation ugi = UserGroupInformation.createRemoteUser("hdfs");
-
-      ugi.doAs(new PrivilegedExceptionAction<Void>() {
-
-        public Void run() throws Exception {
-
-          Configuration conf = new Configuration();
-          conf.set("fs.defaultFS", namenodeuri);
-          conf.set("hadoop.job.ugi", "hdfs");
-          conf.set("fs.hdfs.impl",
-            org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()
-          );
-          conf.set("fs.file.impl",
-            org.apache.hadoop.fs.LocalFileSystem.class.getName()
-          );
-          FileSystem fileSystem = FileSystem.get(conf);
-
-          String filename = source.substring(
-            source.lastIndexOf('/') + 1, source.length());
-          String dest1;
-          if (dest.charAt(dest.length() - 1) != '/') {
-            dest1 = dest + "/" + filename;
-          } else {
-            dest1 = dest + filename;
-          }
-
-          Path path = new Path(dest1);
-          if (fileSystem.exists(path)) {
-
-          }
-          //	Path pathsource = new Path(source);
-          FSDataOutputStream out = fileSystem.create(path);
-
-          InputStream in = new BufferedInputStream(
-            new FileInputStream(new File(source)));
-
-          byte[] b = new byte[1024];
-          int numBytes = 0;
-          while ((numBytes = in.read(b)) > 0) {
-            out.write(b, 0, numBytes);
-          }
-          in.close();
-          out.close();
-          fileSystem.close();
-          return null;
-        }
-      });
-    } catch (Exception e) {
-      logger.error("Webhdfs exception" , e);
-    }
-
-  }
-
-  public void putFileinHdfsKerborizedSecured(final String source, final String dest, final String namenodeuri)
-    throws IOException {
-
-    try {
-
-      final Configuration conf = new Configuration();
-
-      conf.set("fs.hdfs.impl",
-        org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()
-      );
-      conf.set("fs.file.impl",
-        org.apache.hadoop.fs.LocalFileSystem.class.getName()
-      );
-      conf.set("fs.defaultFS", namenodeuri);
-      conf.set("hadoop.job.ugi", "hdfs");
-      conf.set("hadoop.security.authentication", "Kerberos");
-      UserGroupInformation.setConfiguration(conf);
-      UserGroupInformation ugi = UserGroupInformation.createRemoteUser("hdfs");
-
-      ugi.doAs(new PrivilegedExceptionAction<Void>() {
-
-        public Void run() throws Exception {
-
-          FileSystem fileSystem = FileSystem.get(conf);
-
-          String filename = source.substring(
-            source.lastIndexOf('/') + 1, source.length());
-          String dest1;
-          if (dest.charAt(dest.length() - 1) != '/') {
-            dest1 = dest + "/" + filename;
-          } else {
-            dest1 = dest + filename;
-          }
-
-          Path path = new Path(dest1);
-          if (fileSystem.exists(path)) {
-
-          }
-
-          FSDataOutputStream out = fileSystem.create(path);
-
-          InputStream in = new BufferedInputStream(
-            new FileInputStream(new File(source)));
-
-          byte[] b = new byte[1024];
-          int numBytes = 0;
-          while ((numBytes = in.read(b)) > 0) {
-            out.write(b, 0, numBytes);
-          }
-          in.close();
-          out.close();
-          fileSystem.close();
-          return null;
-        }
-      });
-    } catch (Exception e) {
-      logger.error("Webhdfs exception" , e);
-
-    }
-
-  }
-
-}
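
The removed insertRowinAmbaridb() spliced caller-supplied strings straight into its
INSERT and DELETE statements. A parameterized sketch of the same insert, reusing that
method's locals (the table name must still be interpolated, since JDBC cannot bind
identifiers; the 21-column VALUES list mirrors the old concatenated statement):

  import java.sql.Connection;
  import java.sql.PreparedStatement;

  String sql = "INSERT INTO ds_jobimpl_" + id
      + " VALUES (?,'','','','','default',?,0,'','',?,'admin',?,'','job','','','Unknown',?,'','Worksheet')";
  try (PreparedStatement ps = c.prepareStatement(sql)) {
    ps.setString(1, maxcount1);              // ds_id
    ps.setLong(2, epochtime);                // submission time (epoch seconds)
    ps.setString(3, dirname + "logs");       // log location
    ps.setString(4, dirname + "query.hql");  // statement file
    ps.setString(5, dirname);                // job directory
    ps.executeUpdate();
  }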

http://git-wip-us.apache.org/repos/asf/ambari/blob/f6e8637c/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/service/hive/HiveSavedQueryImpl.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/service/hive/HiveSavedQueryImpl.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/service/hive/HiveSavedQueryImpl.java
deleted file mode 100644
index 3ad481d..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/service/hive/HiveSavedQueryImpl.java
+++ /dev/null
@@ -1,778 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.huetoambarimigration.service.hive;
-
-import java.nio.charset.Charset;
-import java.security.PrivilegedExceptionAction;
-import java.sql.Connection;
-import java.sql.DriverManager;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.io.BufferedInputStream;
-import java.io.BufferedReader;
-import java.io.BufferedWriter;
-import java.io.ByteArrayInputStream;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.util.ArrayList;
-import java.util.Calendar;
-import java.util.Date;
-import java.util.GregorianCalendar;
-import java.util.Scanner;
-import java.io.*;
-import java.net.URISyntaxException;
-import java.net.URL;
-
-import org.apache.ambari.view.huetoambarimigration.service.configurationcheck.ConfFileReader;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
-import org.apache.log4j.Logger;
-import org.jdom.Attribute;
-import org.jdom.Document;
-import org.jdom.Element;
-import org.jdom.JDOMException;
-import org.jdom.input.SAXBuilder;
-import org.jdom.output.Format;
-import org.jdom.output.XMLOutputter;
-import org.json.JSONArray;
-import org.json.JSONObject;
-import org.apache.hadoop.security.authentication.client.AuthenticatedURL;
-import org.apache.hadoop.security.UserGroupInformation;
-
-import org.apache.ambari.view.huetoambarimigration.model.*;
-
-public class HiveSavedQueryImpl {
-
-  static final Logger logger = Logger.getLogger(HiveSavedQueryImpl.class);
-
-  private static String readAll(Reader rd) throws IOException {
-    StringBuilder sb = new StringBuilder();
-    int cp;
-    while ((cp = rd.read()) != -1) {
-      sb.append((char) cp);
-    }
-    return sb.toString();
-  }
-
-  public void wrtitetoalternatesqlfile(String dirname, String content,
-                                       String instance, int i) throws IOException {
-
-    Date dNow = new Date();
-    SimpleDateFormat ft = new SimpleDateFormat("YYYY-MM-dd hh:mm:ss");
-    String currentDate = ft.format(dNow);
-
-    XMLOutputter xmlOutput = new XMLOutputter();
-
-    xmlOutput.setFormat(Format.getPrettyFormat());
-
-    File xmlfile = new File(ConfFileReader.getHomeDir() + "RevertChange.xml");
-
-    if (xmlfile.exists()) {
-      String iteration = Integer.toString(i + 1);
-      SAXBuilder builder = new SAXBuilder();
-      Document doc;
-      try {
-        doc = (Document) builder.build(xmlfile);
-
-        Element rootNode = doc.getRootElement();
-
-        Element record = new Element("RevertRecord");
-        record.setAttribute(new Attribute("id", iteration));
-        record.addContent(new Element("datetime").setText(currentDate
-          .toString()));
-        record.addContent(new Element("dirname").setText(dirname));
-        record.addContent(new Element("instance").setText(instance));
-        record.addContent(new Element("query").setText(content));
-
-        rootNode.addContent(record);
-        xmlOutput.output(doc, new FileWriter(ConfFileReader.getHomeDir() + "RevertChange.xml"));
-
-      } catch (JDOMException e) {
-        // TODO Auto-generated catch block
-        logger.error("JDOMException: " , e);
-      }
-
-    } else {
-
-      try {
-        String iteration = Integer.toString(i + 1);
-        Element revertrecord = new Element("RevertChangePage");
-        Document doc = new Document(revertrecord);
-        doc.setRootElement(revertrecord);
-
-        Element record = new Element("RevertRecord");
-        record.setAttribute(new Attribute("id", iteration));
-        record.addContent(new Element("datetime").setText(currentDate
-          .toString()));
-        record.addContent(new Element("dirname").setText(dirname));
-        record.addContent(new Element("instance").setText(instance));
-        record.addContent(new Element("query").setText(content));
-
-        doc.getRootElement().addContent(record);
-
-        xmlOutput.output(doc, new FileWriter(ConfFileReader.getHomeDir() + "RevertChange.xml"));
-
-      } catch (IOException io) {
-
-      }
-
-    }
-
-  }
-
-  public int fetchMaxidforSavedQueryHive(String driverName, Connection c, int id)
-    throws SQLException {
-
-    String ds_id = null;
-    Statement stmt = null;
-    stmt = c.createStatement();
-    ResultSet rs = null;
-
-    if (driverName.contains("postgresql")) {
-      rs = stmt.executeQuery("select MAX(cast(ds_id as integer)) as max from ds_savedquery_" + id + ";");
-    } else if (driverName.contains("mysql")) {
-      rs = stmt.executeQuery("select max(cast(ds_id as unsigned) ) as max from DS_SAVEDQUERY_" + id + ";");
-    } else if (driverName.contains("oracle")) {
-      rs = stmt.executeQuery("select MAX(cast(ds_id as integer)) as max from ds_savedquery_" + id + ";");
-    }
-
-    while (rs.next()) {
-      ds_id = rs.getString("max");
-
-    }
-
-    int num;
-    if (ds_id == null) {
-      num = 1;
-    } else {
-      num = Integer.parseInt(ds_id);
-    }
-
-    return num;
-  }
-
-  public int fetchInstancetablenameForSavedqueryHive(String driverName, Connection c,
-                                                     String instance) throws SQLException {
-
-    String ds_id = new String();
-    int id = 0;
-    Statement stmt = null;
-
-    stmt = c.createStatement();
-    ResultSet rs = null;
-
-    if (driverName.contains("oracle")) {
-      rs = stmt
-        .executeQuery("select * from viewentity where class_name LIKE 'org.apache.ambari.view.hive.resources.savedQueries.SavedQuery' and view_instance_name='"
-          + instance + "'");
-    } else {
-      rs = stmt
-        .executeQuery("select * from viewentity where class_name LIKE 'org.apache.ambari.view.hive.resources.savedQueries.SavedQuery' and view_instance_name='"
-          + instance + "';");
-    }
-
-
-    while (rs.next()) {
-      id = rs.getInt("id");
-
-    }
-
-    return id;
-  }
-
-  public int fetchInstanceTablenameHiveHistory(String driverName, Connection c,
-                                               String instance) throws SQLException {
-    String ds_id = new String();
-    int id = 0;
-    Statement stmt = null;
-
-
-    stmt = c.createStatement();
-    ResultSet rs = null;
-
-    if (driverName.contains("oracle")) {
-      rs = stmt.executeQuery("select id from viewentity where class_name LIKE 'org.apache.ambari.view.hive.resources.jobs.viewJobs.JobImpl' and view_instance_name='" + instance + "'");
-    } else {
-      rs = stmt.executeQuery("select id from viewentity where class_name LIKE 'org.apache.ambari.view.hive.resources.jobs.viewJobs.JobImpl' and view_instance_name='" + instance + "';");
-    }
-
-
-    while (rs.next()) {
-      id = rs.getInt("id");
-      System.out.println("id is " + id);
-
-    }
-
-    return id;
-
-  }
-
-  public int fetchMaxdsidFromHiveHistory(String driverName, Connection c, int id)
-    throws SQLException {
-
-    String ds_id = null;
-    Statement stmt = null;
-
-    stmt = c.createStatement();
-    ResultSet rs = null;
-
-    if (driverName.contains("postgresql")) {
-      rs = stmt.executeQuery("select MAX(cast(ds_id as integer)) as max from ds_jobimpl_" + id + ";");
-    } else if (driverName.contains("mysql")) {
-      rs = stmt.executeQuery("select max( cast(ds_id as unsigned) ) as max from DS_JOBIMPL_" + id + ";");
-    } else if (driverName.contains("oracle")) {
-      rs = stmt.executeQuery("select MAX(cast(ds_id as integer)) as max from ds_jobimpl_" + id);
-    }
-    while (rs.next()) {
-      ds_id = rs.getString("max");
-    }
-    int num;
-    if (ds_id == null) {
-      num = 1;
-    } else {
-      num = Integer.parseInt(ds_id);
-    }
-    return num;
-  }
-
-
-  /**/
-  public void insertRowHiveHistory(String driverName, String dirname, int maxcount,
-                                   long epochtime, Connection c, int id, String instance, int i)
-    throws SQLException, IOException {
-    String maxcount1 = Integer.toString(maxcount);
-
-    String epochtime1 = Long.toString(epochtime);
-
-    String ds_id = new String();
-    Statement stmt = null;
-
-    stmt = c.createStatement();
-    String sql = "";
-    String revsql = "";
-
-    if (driverName.contains("mysql")) {
-      sql = "INSERT INTO DS_JOBIMPL_" + id + " values ('" + maxcount1
-        + "','','','','','default'," + epochtime1 + ",0,'','','"
-        + dirname + "logs','admin','" + dirname
-        + "query.hql','','job','','','Unknown','" + dirname
-        + "','','Worksheet');";
-
-      revsql = "delete from  DS_JOBIMPL_" + id + " where ds_id='"
-        + maxcount1 + "';";
-
-    } else if (driverName.contains("postgresql")) {
-      sql = "INSERT INTO ds_jobimpl_" + id + " values ('" + maxcount1
-        + "','','','','','default'," + epochtime1 + ",0,'','','"
-        + dirname + "logs','admin','" + dirname
-        + "query.hql','','job','','','Unknown','" + dirname
-        + "','','Worksheet');";
-
-      revsql = "delete from  ds_jobimpl_" + id + " where ds_id='"
-        + maxcount1 + "';";
-
-    } else if (driverName.contains("oracle")) {
-      sql = "INSERT INTO ds_jobimpl_" + id + " values ('" + maxcount1
-        + "','','','','','default'," + epochtime1 + ",0,'','','"
-        + dirname + "logs','admin','" + dirname
-        + "query.hql','','job','','','Unknown','" + dirname
-        + "','','Worksheet')";
-      revsql = "delete from  ds_jobimpl_" + id + " where ds_id='"
-        + maxcount1 + "'";
-
-    }
-    stmt.executeUpdate(sql);
-    wrtitetoalternatesqlfile(dirname, revsql, instance, i);
-  }
-
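-  /**
-   * Inserts one migrated saved query into ds_savedquery_<id> and records the
-   * reverse delete statement in the rollback SQL file.
-   */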
-  public void insertRowinSavedQuery(String driverName, int maxcount, String database,
-                                    String dirname, String query, String name, Connection c, int id,
-                                    String instance, int i) throws SQLException, IOException {
-    String maxcount1 = Integer.toString(maxcount);
-
-    Statement stmt = c.createStatement();
-    String sql = "";
-    String revsql = "";
-
-    if (driverName.contains("mysql")) {
-      sql = "INSERT INTO DS_SAVEDQUERY_" + id + " values ('"
-        + maxcount1 + "','" + database + "','" + "admin" + "','"
-        + dirname + "query.hql','" + query + "','" + name + "');";
-
-      revsql = "delete from  DS_SAVEDQUERY_" + id + " where ds_id='"
-        + maxcount1 + "';";
-
-    } else if (driverName.contains("postgresql")) {
-      sql = "INSERT INTO ds_savedquery_" + id + " values ('"
-        + maxcount1 + "','" + database + "','" + "admin" + "','"
-        + dirname + "query.hql','" + query + "','" + name + "');";
-
-      revsql = "delete from  ds_savedquery_" + id + " where ds_id='"
-        + maxcount1 + "';";
-
-    } else if (driverName.contains("oracle")) {
-      sql = "INSERT INTO ds_savedquery_" + id + " values ('"
-        + maxcount1 + "','" + database + "','" + "admin" + "','"
-        + dirname + "query.hql','" + query + "','" + name + "')";
-
-      revsql = "delete from  ds_savedquery_" + id + " where ds_id='"
-        + maxcount1 + "'";
-
-    }
-    wrtitetoalternatesqlfile(dirname, revsql, instance, i);
-    stmt.executeUpdate(sql);
-  }
-
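-  /** Returns the current time as seconds since the Unix epoch. */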
-  public long getEpochTime() throws ParseException {
-    return System.currentTimeMillis() / 1000L;
-  }
-
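-  /** Returns the current local time as year-month-day_hour-minute (12-hour clock). */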
-  public String getTime() throws ParseException {
-    GregorianCalendar date = new GregorianCalendar();
-
-    int day = date.get(Calendar.DAY_OF_MONTH);
-    int month = date.get(Calendar.MONTH);
-    int year = date.get(Calendar.YEAR);
-    int hour = date.get(Calendar.HOUR);
-    int minute = date.get(Calendar.MINUTE);
-
-    return year + "-" + (month + 1) + "-" + day + "_" + hour + "-" + minute;
-  }
-
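-  /**
-   * Reads saved queries from the Hue database, optionally filtered by user
-   * ("all" disables the filter) and by an mtime date range (empty strings
-   * disable either bound), and maps each row to a PojoHive.
-   */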
-  public ArrayList<PojoHive> fetchFromHuedb(String username,
-                                            String startdate, String endtime, Connection connection)
-    throws ClassNotFoundException, IOException {
-    int id = 0;
-    ArrayList<PojoHive> hiveArrayList = new ArrayList<PojoHive>();
-    ResultSet rs1 = null;
-
-    try {
-      Statement statement = connection.createStatement();
-      // Resolve the Hue user id unless all users were requested.
-      if (!username.equals("all")) {
-        ResultSet rs = statement
-          .executeQuery("select id from auth_user where username='"
-            + username + "';");
-        while (rs.next()) {
-          id = rs.getInt("id");
-        }
-      }
-      if (startdate.equals("") && endtime.equals("")) {
-        if (username.equals("all")) {
-          rs1 = statement
-            .executeQuery("select data,name,owner_id from beeswax_savedquery;");
-
-        } else {
-          rs1 = statement
-            .executeQuery("select data,name,owner_id from beeswax_savedquery where name!='My saved query'and owner_id ="
-              + id + ";");
-        }
-
-      } else if (!(startdate.equals("")) && !(endtime.equals(""))) {
-        if (username.equals("all")) {
-          rs1 = statement
-            .executeQuery("select data,name,owner_id from beeswax_savedquery where name!='My saved query' AND mtime >= date('"
-              + startdate
-              + "') AND mtime <= date('"
-              + endtime + "');");
-        } else {
-          rs1 = statement
-            .executeQuery("select data,name,owner_id from beeswax_savedquery where name!='My saved query'and owner_id ="
-              + id
-              + " AND mtime >= date('"
-              + startdate
-              + "') AND mtime <= date('"
-              + endtime
-              + "');");
-        }
-
-      } else if (!(startdate.equals("")) && (endtime.equals(""))) {
-        if (username.equals("all")) {
-          rs1 = statement
-            .executeQuery("select data,name,owner_id from beeswax_savedquery where name!='My saved query'and  mtime >= date('"
-              + startdate + "');");
-        } else {
-          rs1 = statement
-            .executeQuery("select data,name,owner_id from beeswax_savedquery where name!='My saved query'and owner_id ="
-              + id
-              + " AND mtime >= date('"
-              + startdate
-              + "');");
-        }
-
-      } else if ((startdate.equals("")) && !(endtime.equals(""))) {
-        if (username.equals("all")) {
-          rs1 = statement
-            .executeQuery("select data,name,owner_id from beeswax_savedquery where name!='My saved query' AND mtime <= date('"
-              + endtime + "');");
-        } else {
-          rs1 = statement
-            .executeQuery("select data,name,owner_id from beeswax_savedquery where name!='My saved query'and owner_id ="
-              + id
-              + " AND mtime <= date('"
-              + endtime
-              + "');");
-        }
-
-      }
-      while (rs1.next()) {
-        PojoHive hivepojo = new PojoHive();
-        String name = rs1.getString("name");
-        String temp = rs1.getString("data");
-        InputStream is = new ByteArrayInputStream(temp.getBytes(Charset.forName("UTF-8")));
-        BufferedReader rd = new BufferedReader(new InputStreamReader(
-          is, Charset.forName("UTF-8")));
-        String jsonText = readAll(rd);
-        // The "data" column holds nested JSON: the outer "query" field is
-        // itself a JSON object carrying the query text and database name.
-        JSONObject json = new JSONObject(jsonText);
-        String resources = json.get("query").toString();
-        json = new JSONObject(resources);
-        String resarr = json.get("query").toString();
-        String database = json.get("database").toString();
-        hivepojo.setQuery(resarr);
-        hivepojo.setDatabase(database);
-        hivepojo.setOwner(name);
-        hiveArrayList.add(hivepojo);
-      }
-
-    } catch (SQLException e) {
-      logger.error("SQLException while reading the Hue database", e);
-    } finally {
-      try {
-        if (connection != null)
-          connection.close();
-      } catch (SQLException e) {
-        logger.error("sql connection exception" , e);
-      }
-    }
-
-    return hiveArrayList;
-
-  }
-
-
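-  /** Writes the migrated query text to a temporary query.hql file under homedir. */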
-  public void writetoFilequeryHql(String content, String homedir) {
-    try {
-      File file = new File(homedir + "query.hql");
-      if (!file.exists()) {
-        file.createNewFile();
-      }
-
-      // try-with-resources closes the writer even if write() throws
-      try (BufferedWriter bw = new BufferedWriter(new FileWriter(file.getAbsoluteFile()))) {
-        bw.write(content);
-      }
-    } catch (IOException e) {
-      logger.error("IOException: ", e);
-    }
-  }
-
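-  /** Deletes the temporary query.hql file under homedir. */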
-  public void deleteFileQueryhql(String homedir) {
-    try {
-      File file = new File(homedir + "query.hql");
-      if (file.delete()) {
-        logger.info("temporary hql file deleted");
-      } else {
-        logger.warn("temporary hql file delete failed");
-      }
-    } catch (Exception e) {
-      logger.error("File Exception ", e);
-    }
-  }
-
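-  /** Deletes the temporary logs file under homedir. */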
-  public void deleteFileQueryLogs(String homedir) {
-    try {
-      File file = new File(homedir + "logs");
-      if (file.delete()) {
-        logger.info("temporary logs file deleted");
-      } else {
-        logger.warn("temporary logs file delete failed");
-      }
-    } catch (Exception e) {
-      logger.error("File Exception ", e);
-    }
-  }
-
-
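-  /** Creates an empty logs file under homedir, truncating any existing content. */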
-  public void writetoFileLogs(String homedir) {
-    try {
-      File file = new File(homedir + "logs");
-
-      // if the file doesn't exist, create it
-      if (!file.exists()) {
-        file.createNewFile();
-      }
-
-      // truncate the file to empty
-      try (BufferedWriter bw = new BufferedWriter(new FileWriter(file.getAbsoluteFile()))) {
-        bw.write("");
-      }
-    } catch (IOException e) {
-      logger.error("IOException: ", e);
-    }
-  }
-
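-  /**
-   * Creates the given directory in HDFS, running as the "hdfs" remote user.
-   * Failures are logged rather than propagated.
-   */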
-  public void createDirHive(final String dir, final String namenodeuri)
-    throws IOException, URISyntaxException {
-
-    try {
-      final Configuration conf = new Configuration();
-
-      conf.set("fs.hdfs.impl",
-        org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()
-      );
-      conf.set("fs.file.impl",
-        org.apache.hadoop.fs.LocalFileSystem.class.getName()
-      );
-      conf.set("fs.defaultFS", namenodeuri);
-      conf.set("hadoop.job.ugi", "hdfs");
-      conf.set("hadoop.security.authentication", "Kerberos");
-
-      UserGroupInformation.setConfiguration(conf);
-      UserGroupInformation ugi = UserGroupInformation.createRemoteUser("hdfs");
-
-      ugi.doAs(new PrivilegedExceptionAction<Void>() {
-
-        public Void run() throws Exception {
-
-          FileSystem fs = FileSystem.get(conf);
-          Path src = new Path(dir);
-          fs.mkdirs(src);
-          return null;
-        }
-      });
-    } catch (Exception e) {
-      logger.error("Webhdfs: " , e);
-    }
-  }
-
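-  /**
-   * Secured-cluster variant of createDirHive; the body is currently identical
-   * to the unsecured version.
-   */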
-  public void createDirHiveSecured(final String dir, final String namenodeuri)
-    throws IOException, URISyntaxException {
-
-    try {
-      final Configuration conf = new Configuration();
-
-      conf.set("fs.hdfs.impl",
-        org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()
-      );
-      conf.set("fs.file.impl",
-        org.apache.hadoop.fs.LocalFileSystem.class.getName()
-      );
-      conf.set("fs.defaultFS", namenodeuri);
-      conf.set("hadoop.job.ugi", "hdfs");
-      conf.set("hadoop.security.authentication", "Kerberos");
-
-      UserGroupInformation.setConfiguration(conf);
-      UserGroupInformation ugi = UserGroupInformation.createRemoteUser("hdfs");
-
-      ugi.doAs(new PrivilegedExceptionAction<Void>() {
-
-        public Void run() throws Exception {
-
-          FileSystem fs = FileSystem.get(conf);
-          Path src = new Path(dir);
-          fs.mkdirs(src);
-          return null;
-        }
-      });
-    } catch (Exception e) {
-      logger.error("Webhdfs: " , e);
-    }
-  }
-
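-  /**
-   * Copies a local file into the given HDFS directory as the "hdfs" remote
-   * user, overwriting any file of the same name. Failures are logged rather
-   * than propagated.
-   */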
-  public void putFileinHdfs(final String source, final String dest,
-                            final String namenodeuri) throws IOException {
-
-    try {
-      final Configuration conf = new Configuration();
-
-      conf.set("fs.hdfs.impl",
-        org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()
-      );
-      conf.set("fs.file.impl",
-        org.apache.hadoop.fs.LocalFileSystem.class.getName()
-      );
-      conf.set("fs.defaultFS", namenodeuri);
-      conf.set("hadoop.job.ugi", "hdfs");
-      conf.set("hadoop.security.authentication", "Kerberos");
-
-      UserGroupInformation.setConfiguration(conf);
-      UserGroupInformation ugi = UserGroupInformation.createRemoteUser("hdfs");
-      ugi.doAs(new PrivilegedExceptionAction<Void>() {
-
-        public Void run() throws Exception {
-
-          FileSystem fileSystem = FileSystem.get(conf);
-          String filename = source.substring(
-            source.lastIndexOf('/') + 1, source.length());
-          String dest1;
-          if (dest.charAt(dest.length() - 1) != '/') {
-            dest1 = dest + "/" + filename;
-          } else {
-            dest1 = dest + filename;
-          }
-
-          Path path = new Path(dest1);
-          // fileSystem.create() overwrites any existing file at this path
-          FSDataOutputStream out = fileSystem.create(path);
-
-          InputStream in = new BufferedInputStream(
-            new FileInputStream(new File(source)));
-
-          byte[] b = new byte[1024];
-          int numBytes = 0;
-          while ((numBytes = in.read(b)) > 0) {
-            out.write(b, 0, numBytes);
-          }
-          in.close();
-          out.close();
-          fileSystem.close();
-          return null;
-        }
-      });
-    } catch (Exception e) {
-      logger.error("Webhdfs exception" , e);
-    }
-
-  }
-
-
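-  /**
-   * Secured-cluster variant of putFileinHdfs; the body is currently identical
-   * to the unsecured version.
-   */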
-  public void putFileinHdfsSecured(final String source, final String dest,
-                                   final String namenodeuri) throws IOException {
-
-    try {
-      final Configuration conf = new Configuration();
-
-      conf.set("fs.hdfs.impl",
-        org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()
-      );
-      conf.set("fs.file.impl",
-        org.apache.hadoop.fs.LocalFileSystem.class.getName()
-      );
-      conf.set("fs.defaultFS", namenodeuri);
-      conf.set("hadoop.job.ugi", "hdfs");
-      conf.set("hadoop.security.authentication", "Kerberos");
-
-      UserGroupInformation.setConfiguration(conf);
-      UserGroupInformation ugi = UserGroupInformation.createRemoteUser("hdfs");
-      ugi.doAs(new PrivilegedExceptionAction<Void>() {
-
-        public Void run() throws Exception {
-
-
-          FileSystem fileSystem = FileSystem.get(conf);
-
-          String filename = source.substring(
-            source.lastIndexOf('/') + 1, source.length());
-          String dest1;
-          if (dest.charAt(dest.length() - 1) != '/') {
-            dest1 = dest + "/" + filename;
-          } else {
-            dest1 = dest + filename;
-          }
-
-          Path path = new Path(dest1);
-          // fileSystem.create() overwrites any existing file at this path
-          FSDataOutputStream out = fileSystem.create(path);
-
-          InputStream in = new BufferedInputStream(
-            new FileInputStream(new File(source)));
-
-          byte[] b = new byte[1024];
-          int numBytes = 0;
-          while ((numBytes = in.read(b)) > 0) {
-            out.write(b, 0, numBytes);
-          }
-          in.close();
-          out.close();
-          fileSystem.close();
-
-
-          return null;
-        }
-      });
-    } catch (Exception e) {
-      logger.error("Webhdfs exception" , e);
-    }
-
-  }
-
-}

