airavata-commits mailing list archives

From chath...@apache.org
Subject [3/5] airavata git commit: fixing compilation issues
Date Wed, 24 Jun 2015 14:15:04 GMT
http://git-wip-us.apache.org/repos/asf/airavata/blob/df3fbe6a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/AdvancedSCPOutputHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/AdvancedSCPOutputHandler.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/AdvancedSCPOutputHandler.java
index 55bb987..175351f 100644
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/AdvancedSCPOutputHandler.java
+++ b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/AdvancedSCPOutputHandler.java
@@ -1,225 +1,225 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.ssh.handler;
-
-import org.apache.airavata.common.exception.ApplicationSettingsException;
-import org.apache.airavata.gfac.core.GFacException;
-import org.apache.airavata.gfac.core.SSHApiException;
-import org.apache.airavata.gfac.core.cluster.RemoteCluster;
-import org.apache.airavata.gfac.core.context.JobExecutionContext;
-import org.apache.airavata.gfac.core.handler.AbstractHandler;
-import org.apache.airavata.gfac.core.handler.GFacHandlerException;
-import org.apache.airavata.gfac.core.GFacUtils;
-import org.apache.airavata.gfac.gsi.ssh.impl.authentication.DefaultPasswordAuthenticationInfo;
-import org.apache.airavata.gfac.gsi.ssh.impl.authentication.DefaultPublicKeyFileAuthentication;
-import org.apache.airavata.gfac.ssh.security.SSHSecurityContext;
-import org.apache.airavata.gfac.ssh.util.GFACSSHUtils;
-import org.apache.airavata.gfac.core.authentication.AuthenticationInfo;
-import org.apache.airavata.model.appcatalog.appinterface.DataType;
-import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;
-import org.apache.airavata.model.experiment.CorrectiveAction;
-import org.apache.airavata.model.experiment.ErrorCategory;
-import org.apache.airavata.registry.cpi.ExpCatChildDataType;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.File;
-import java.io.PrintWriter;
-import java.io.StringWriter;
-import java.net.MalformedURLException;
-import java.net.URL;
-import java.util.*;
-
-/**
- * This handler will copy outputs from airavata installed local directory
- * to a remote location, prior to this handler SCPOutputHandler should be invoked
- * Should add following configuration to gfac-config.xml and configure the keys properly
- * <Handler class="AdvancedSCPOutputHandler">
-                            <property name="privateKeyPath" value="/Users/lahirugunathilake/.ssh/id_dsa"/>
-                            <property name="publicKeyPath" value="/Users/lahirugunathilake/.ssh/id_dsa.pub"/>
-                        <property name="userName" value="airavata"/>
-                        <property name="hostName" value="gw98.iu.xsede.org"/>
-                        <property name="outputPath" value="/home/airavata/outputData"/>
-                        <property name="passPhrase" value="/home/airavata/outputData"/>
-                        <property name="password" value="/home/airavata/outputData"/>
-
- */
-public class AdvancedSCPOutputHandler extends AbstractHandler {
-    private static final Logger log = LoggerFactory.getLogger(AdvancedSCPOutputHandler.class);
-
-    public static final int DEFAULT_SSH_PORT = 22;
-
-    private String password = null;
-
-    private String publicKeyPath;
-
-    private String passPhrase;
-
-    private String privateKeyPath;
-
-    private String userName;
-
-    private String hostName;
-
-    private String outputPath;
-
-
-    public void initProperties(Properties properties) throws GFacHandlerException {
-        password = (String)properties.get("password");
-        passPhrase = (String)properties.get("passPhrase");
-        privateKeyPath = (String)properties.get("privateKeyPath");
-        publicKeyPath = (String)properties.get("publicKeyPath");
-        userName = (String)properties.get("userName");
-        hostName = (String)properties.get("hostName");
-        outputPath = (String)properties.get("outputPath");
-    }
-
-    @Override
-    public void invoke(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-    	RemoteCluster remoteCluster = null;
-        AuthenticationInfo authenticationInfo = null;
-        if (password != null) {
-            authenticationInfo = new DefaultPasswordAuthenticationInfo(this.password);
-        } else {
-            authenticationInfo = new DefaultPublicKeyFileAuthentication(this.publicKeyPath, this.privateKeyPath,
-                    this.passPhrase);
-        }
-        try {
-            String hostName = jobExecutionContext.getHostName();
-            if (jobExecutionContext.getSecurityContext(hostName) == null) {
-                try {
-                    GFACSSHUtils.addSecurityContext(jobExecutionContext);
-                } catch (ApplicationSettingsException e) {
-                    log.error(e.getMessage());
-                    try {
-                        StringWriter errors = new StringWriter();
-                        e.printStackTrace(new PrintWriter(errors));
-         				GFacUtils.saveErrorDetails(jobExecutionContext,  errors.toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
-         			} catch (GFacException e1) {
-         				 log.error(e1.getLocalizedMessage());
-         			}
-                    throw new GFacHandlerException("Error while creating SSHSecurityContext", e, e.getLocalizedMessage());
-                }
-            }
-            String standardError = jobExecutionContext.getStandardError();
-            String standardOutput = jobExecutionContext.getStandardOutput();
-            super.invoke(jobExecutionContext);
-            // Server info
-            if(jobExecutionContext.getTaskData().getAdvancedOutputDataHandling() != null && jobExecutionContext.getTaskData().getAdvancedOutputDataHandling().getOutputDataDir() != null){
-                try{
-                    URL outputPathURL = new URL(jobExecutionContext.getTaskData().getAdvancedOutputDataHandling().getOutputDataDir());
-                    this.userName = outputPathURL.getUserInfo();
-                    this.hostName = outputPathURL.getHost();
-                    outputPath = outputPathURL.getPath();
-                } catch (MalformedURLException e) {
-                    log.error(e.getLocalizedMessage(),e);
-                }
-            }
-            String key = GFACSSHUtils.prepareSecurityContext(jobExecutionContext, authenticationInfo, this.userName, this.hostName, DEFAULT_SSH_PORT);
-            remoteCluster = ((SSHSecurityContext)jobExecutionContext.getSecurityContext(key)).getRemoteCluster();
-            if(jobExecutionContext.getTaskData().getAdvancedOutputDataHandling() != null && !jobExecutionContext.getTaskData().getAdvancedOutputDataHandling().isPersistOutputData()){
-            outputPath = outputPath + File.separator + jobExecutionContext.getExperimentID() + "-" + jobExecutionContext.getTaskData().getTaskID()
-                    + File.separator;
-                remoteCluster.makeDirectory(outputPath);
-            }
-            remoteCluster.scpTo(outputPath, standardError);
-            remoteCluster.scpTo(outputPath, standardOutput);
-            List<OutputDataObjectType> outputArray = new ArrayList<OutputDataObjectType>();
-            Map<String, Object> output = jobExecutionContext.getOutMessageContext().getParameters();
-            Set<String> keys = output.keySet();
-            for (String paramName : keys) {
-                OutputDataObjectType outputDataObjectType = (OutputDataObjectType) output.get(paramName);
-                if (outputDataObjectType.getType() == DataType.URI) {
-                    // for failed jobs outputs are not generated. So we should not download outputs
-                    if (GFacUtils.isFailedJob(jobExecutionContext)){
-                        continue;
-                    }
-                	String downloadFile = outputDataObjectType.getValue();
-                    if(downloadFile == null || !(new File(downloadFile).isFile())){
-                        GFacUtils.saveErrorDetails(jobExecutionContext, "Empty Output returned from the application", CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
-                		throw new GFacHandlerException("Empty Output returned from the application.." );
-                	}
-                	remoteCluster.scpTo(outputPath, downloadFile);
-                    String fileName = downloadFile.substring(downloadFile.lastIndexOf(File.separatorChar)+1, downloadFile.length());
-                    OutputDataObjectType dataObjectType = new OutputDataObjectType();
-                    dataObjectType.setValue(outputPath + File.separatorChar + fileName);
-                    dataObjectType.setName(paramName);
-                    dataObjectType.setType(DataType.URI);
-                    dataObjectType.setIsRequired(outputDataObjectType.isIsRequired());
-                    dataObjectType.setRequiredToAddedToCommandLine(outputDataObjectType.isRequiredToAddedToCommandLine());
-                    dataObjectType.setApplicationArgument(outputDataObjectType.getApplicationArgument());
-                    dataObjectType.setSearchQuery(outputDataObjectType.getSearchQuery());
-                    outputArray.add(dataObjectType);
-                }else if (outputDataObjectType.getType() == DataType.STDOUT) {
-                    remoteCluster.scpTo(outputPath, standardOutput);
-                    String fileName = standardOutput.substring(standardOutput.lastIndexOf(File.separatorChar)+1, standardOutput.length());
-                    OutputDataObjectType dataObjectType = new OutputDataObjectType();
-                    dataObjectType.setValue(outputPath + File.separatorChar + fileName);
-                    dataObjectType.setName(paramName);
-                    dataObjectType.setType(DataType.STDOUT);
-                    dataObjectType.setIsRequired(outputDataObjectType.isIsRequired());
-                    dataObjectType.setRequiredToAddedToCommandLine(outputDataObjectType.isRequiredToAddedToCommandLine());
-                    dataObjectType.setApplicationArgument(outputDataObjectType.getApplicationArgument());
-                    dataObjectType.setSearchQuery(outputDataObjectType.getSearchQuery());
-                    outputArray.add(dataObjectType);
-                }else if (outputDataObjectType.getType() == DataType.STDERR) {
-                    remoteCluster.scpTo(outputPath, standardError);
-                    String fileName = standardError.substring(standardError.lastIndexOf(File.separatorChar)+1, standardError.length());
-                    OutputDataObjectType dataObjectType = new OutputDataObjectType();
-                    dataObjectType.setValue(outputPath + File.separatorChar + fileName);
-                    dataObjectType.setName(paramName);
-                    dataObjectType.setType(DataType.STDERR);
-                    dataObjectType.setIsRequired(outputDataObjectType.isIsRequired());
-                    dataObjectType.setRequiredToAddedToCommandLine(outputDataObjectType.isRequiredToAddedToCommandLine());
-                    dataObjectType.setApplicationArgument(outputDataObjectType.getApplicationArgument());
-                    dataObjectType.setSearchQuery(outputDataObjectType.getSearchQuery());
-                    outputArray.add(dataObjectType);
-                }
-             }
-           experimentCatalog.add(ExpCatChildDataType.EXPERIMENT_OUTPUT, outputArray, jobExecutionContext.getExperimentID());
-        } catch (SSHApiException e) {
-            try {
-                StringWriter errors = new StringWriter();
-                e.printStackTrace(new PrintWriter(errors));
-				GFacUtils.saveErrorDetails(jobExecutionContext,  errors.toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
-			} catch (GFacException e1) {
-				 log.error(e1.getLocalizedMessage());
-			}
-            log.error("Error transfering files to remote host : " + hostName + " with the user: " + userName);
-            log.error(e.getMessage());
-            throw new GFacHandlerException(e);
-        } catch (Exception e) {
-        	 try {
- 				GFacUtils.saveErrorDetails(jobExecutionContext,  e.getCause().toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
- 			} catch (GFacException e1) {
- 				 log.error(e1.getLocalizedMessage());
- 			}
-        	throw new GFacHandlerException(e);
-        }
-    }
-
-    @Override
-    public void recover(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-        // TODO: Auto generated method body.
-    }
-
-
-}
+///*
+// *
+// * Licensed to the Apache Software Foundation (ASF) under one
+// * or more contributor license agreements.  See the NOTICE file
+// * distributed with this work for additional information
+// * regarding copyright ownership.  The ASF licenses this file
+// * to you under the Apache License, Version 2.0 (the
+// * "License"); you may not use this file except in compliance
+// * with the License.  You may obtain a copy of the License at
+// *
+// *   http://www.apache.org/licenses/LICENSE-2.0
+// *
+// * Unless required by applicable law or agreed to in writing,
+// * software distributed under the License is distributed on an
+// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// * KIND, either express or implied.  See the License for the
+// * specific language governing permissions and limitations
+// * under the License.
+// *
+//*/
+//package org.apache.airavata.gfac.ssh.handler;
+//
+//import org.apache.airavata.common.exception.ApplicationSettingsException;
+//import org.apache.airavata.gfac.core.GFacException;
+//import org.apache.airavata.gfac.core.SSHApiException;
+//import org.apache.airavata.gfac.core.cluster.RemoteCluster;
+//import org.apache.airavata.gfac.core.context.JobExecutionContext;
+//import org.apache.airavata.gfac.core.handler.AbstractHandler;
+//import org.apache.airavata.gfac.core.handler.GFacHandlerException;
+//import org.apache.airavata.gfac.core.GFacUtils;
+//import org.apache.airavata.gfac.gsi.ssh.impl.authentication.DefaultPasswordAuthenticationInfo;
+//import org.apache.airavata.gfac.gsi.ssh.impl.authentication.DefaultPublicKeyFileAuthentication;
+//import org.apache.airavata.gfac.ssh.security.SSHSecurityContext;
+//import org.apache.airavata.gfac.ssh.util.GFACSSHUtils;
+//import org.apache.airavata.gfac.core.authentication.AuthenticationInfo;
+//import org.apache.airavata.model.appcatalog.appinterface.DataType;
+//import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;
+//import org.apache.airavata.model.experiment.CorrectiveAction;
+//import org.apache.airavata.model.experiment.ErrorCategory;
+//import org.apache.airavata.registry.cpi.ExpCatChildDataType;
+//import org.slf4j.Logger;
+//import org.slf4j.LoggerFactory;
+//
+//import java.io.File;
+//import java.io.PrintWriter;
+//import java.io.StringWriter;
+//import java.net.MalformedURLException;
+//import java.net.URL;
+//import java.util.*;
+//
+///**
+// * This handler will copy outputs from airavata installed local directory
+// * to a remote location, prior to this handler SCPOutputHandler should be invoked
+// * Should add following configuration to gfac-config.xml and configure the keys properly
+// * <Handler class="AdvancedSCPOutputHandler">
+//                            <property name="privateKeyPath" value="/Users/lahirugunathilake/.ssh/id_dsa"/>
+//                            <property name="publicKeyPath" value="/Users/lahirugunathilake/.ssh/id_dsa.pub"/>
+//                        <property name="userName" value="airavata"/>
+//                        <property name="hostName" value="gw98.iu.xsede.org"/>
+//                        <property name="outputPath" value="/home/airavata/outputData"/>
+//                        <property name="passPhrase" value="/home/airavata/outputData"/>
+//                        <property name="password" value="/home/airavata/outputData"/>
+//
+// */
+//public class AdvancedSCPOutputHandler extends AbstractHandler {
+//    private static final Logger log = LoggerFactory.getLogger(AdvancedSCPOutputHandler.class);
+//
+//    public static final int DEFAULT_SSH_PORT = 22;
+//
+//    private String password = null;
+//
+//    private String publicKeyPath;
+//
+//    private String passPhrase;
+//
+//    private String privateKeyPath;
+//
+//    private String userName;
+//
+//    private String hostName;
+//
+//    private String outputPath;
+//
+//
+//    public void initProperties(Properties properties) throws GFacHandlerException {
+//        password = (String)properties.get("password");
+//        passPhrase = (String)properties.get("passPhrase");
+//        privateKeyPath = (String)properties.get("privateKeyPath");
+//        publicKeyPath = (String)properties.get("publicKeyPath");
+//        userName = (String)properties.get("userName");
+//        hostName = (String)properties.get("hostName");
+//        outputPath = (String)properties.get("outputPath");
+//    }
+//
+//    @Override
+//    public void invoke(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
+//    	RemoteCluster remoteCluster = null;
+//        AuthenticationInfo authenticationInfo = null;
+//        if (password != null) {
+//            authenticationInfo = new DefaultPasswordAuthenticationInfo(this.password);
+//        } else {
+//            authenticationInfo = new DefaultPublicKeyFileAuthentication(this.publicKeyPath, this.privateKeyPath,
+//                    this.passPhrase);
+//        }
+//        try {
+//            String hostName = jobExecutionContext.getHostName();
+//            if (jobExecutionContext.getSecurityContext(hostName) == null) {
+//                try {
+//                    GFACSSHUtils.addSecurityContext(jobExecutionContext);
+//                } catch (ApplicationSettingsException e) {
+//                    log.error(e.getMessage());
+//                    try {
+//                        StringWriter errors = new StringWriter();
+//                        e.printStackTrace(new PrintWriter(errors));
+//         				GFacUtils.saveErrorDetails(jobExecutionContext,  errors.toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
+//         			} catch (GFacException e1) {
+//         				 log.error(e1.getLocalizedMessage());
+//         			}
+//                    throw new GFacHandlerException("Error while creating SSHSecurityContext", e, e.getLocalizedMessage());
+//                }
+//            }
+//            String standardError = jobExecutionContext.getStandardError();
+//            String standardOutput = jobExecutionContext.getStandardOutput();
+//            super.invoke(jobExecutionContext);
+//            // Server info
+//            if(jobExecutionContext.getTaskData().getAdvancedOutputDataHandling() != null && jobExecutionContext.getTaskData().getAdvancedOutputDataHandling().getOutputDataDir() != null){
+//                try{
+//                    URL outputPathURL = new URL(jobExecutionContext.getTaskData().getAdvancedOutputDataHandling().getOutputDataDir());
+//                    this.userName = outputPathURL.getUserInfo();
+//                    this.hostName = outputPathURL.getHost();
+//                    outputPath = outputPathURL.getPath();
+//                } catch (MalformedURLException e) {
+//                    log.error(e.getLocalizedMessage(),e);
+//                }
+//            }
+//            String key = GFACSSHUtils.prepareSecurityContext(jobExecutionContext, authenticationInfo, this.userName, this.hostName, DEFAULT_SSH_PORT);
+//            remoteCluster = ((SSHSecurityContext)jobExecutionContext.getSecurityContext(key)).getRemoteCluster();
+//            if(jobExecutionContext.getTaskData().getAdvancedOutputDataHandling() != null && !jobExecutionContext.getTaskData().getAdvancedOutputDataHandling().isPersistOutputData()){
+//            outputPath = outputPath + File.separator + jobExecutionContext.getExperimentID() + "-" + jobExecutionContext.getTaskData().getTaskID()
+//                    + File.separator;
+//                remoteCluster.makeDirectory(outputPath);
+//            }
+//            remoteCluster.scpTo(outputPath, standardError);
+//            remoteCluster.scpTo(outputPath, standardOutput);
+//            List<OutputDataObjectType> outputArray = new ArrayList<OutputDataObjectType>();
+//            Map<String, Object> output = jobExecutionContext.getOutMessageContext().getParameters();
+//            Set<String> keys = output.keySet();
+//            for (String paramName : keys) {
+//                OutputDataObjectType outputDataObjectType = (OutputDataObjectType) output.get(paramName);
+//                if (outputDataObjectType.getType() == DataType.URI) {
+//                    // for failed jobs outputs are not generated. So we should not download outputs
+//                    if (GFacUtils.isFailedJob(jobExecutionContext)){
+//                        continue;
+//                    }
+//                	String downloadFile = outputDataObjectType.getValue();
+//                    if(downloadFile == null || !(new File(downloadFile).isFile())){
+//                        GFacUtils.saveErrorDetails(jobExecutionContext, "Empty Output returned from the application", CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
+//                		throw new GFacHandlerException("Empty Output returned from the application.." );
+//                	}
+//                	remoteCluster.scpTo(outputPath, downloadFile);
+//                    String fileName = downloadFile.substring(downloadFile.lastIndexOf(File.separatorChar)+1, downloadFile.length());
+//                    OutputDataObjectType dataObjectType = new OutputDataObjectType();
+//                    dataObjectType.setValue(outputPath + File.separatorChar + fileName);
+//                    dataObjectType.setName(paramName);
+//                    dataObjectType.setType(DataType.URI);
+//                    dataObjectType.setIsRequired(outputDataObjectType.isIsRequired());
+//                    dataObjectType.setRequiredToAddedToCommandLine(outputDataObjectType.isRequiredToAddedToCommandLine());
+//                    dataObjectType.setApplicationArgument(outputDataObjectType.getApplicationArgument());
+//                    dataObjectType.setSearchQuery(outputDataObjectType.getSearchQuery());
+//                    outputArray.add(dataObjectType);
+//                }else if (outputDataObjectType.getType() == DataType.STDOUT) {
+//                    remoteCluster.scpTo(outputPath, standardOutput);
+//                    String fileName = standardOutput.substring(standardOutput.lastIndexOf(File.separatorChar)+1, standardOutput.length());
+//                    OutputDataObjectType dataObjectType = new OutputDataObjectType();
+//                    dataObjectType.setValue(outputPath + File.separatorChar + fileName);
+//                    dataObjectType.setName(paramName);
+//                    dataObjectType.setType(DataType.STDOUT);
+//                    dataObjectType.setIsRequired(outputDataObjectType.isIsRequired());
+//                    dataObjectType.setRequiredToAddedToCommandLine(outputDataObjectType.isRequiredToAddedToCommandLine());
+//                    dataObjectType.setApplicationArgument(outputDataObjectType.getApplicationArgument());
+//                    dataObjectType.setSearchQuery(outputDataObjectType.getSearchQuery());
+//                    outputArray.add(dataObjectType);
+//                }else if (outputDataObjectType.getType() == DataType.STDERR) {
+//                    remoteCluster.scpTo(outputPath, standardError);
+//                    String fileName = standardError.substring(standardError.lastIndexOf(File.separatorChar)+1, standardError.length());
+//                    OutputDataObjectType dataObjectType = new OutputDataObjectType();
+//                    dataObjectType.setValue(outputPath + File.separatorChar + fileName);
+//                    dataObjectType.setName(paramName);
+//                    dataObjectType.setType(DataType.STDERR);
+//                    dataObjectType.setIsRequired(outputDataObjectType.isIsRequired());
+//                    dataObjectType.setRequiredToAddedToCommandLine(outputDataObjectType.isRequiredToAddedToCommandLine());
+//                    dataObjectType.setApplicationArgument(outputDataObjectType.getApplicationArgument());
+//                    dataObjectType.setSearchQuery(outputDataObjectType.getSearchQuery());
+//                    outputArray.add(dataObjectType);
+//                }
+//             }
+//           experimentCatalog.add(ExpCatChildDataType.EXPERIMENT_OUTPUT, outputArray, jobExecutionContext.getExperimentID());
+//        } catch (SSHApiException e) {
+//            try {
+//                StringWriter errors = new StringWriter();
+//                e.printStackTrace(new PrintWriter(errors));
+//				GFacUtils.saveErrorDetails(jobExecutionContext,  errors.toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
+//			} catch (GFacException e1) {
+//				 log.error(e1.getLocalizedMessage());
+//			}
+//            log.error("Error transfering files to remote host : " + hostName + " with the user: " + userName);
+//            log.error(e.getMessage());
+//            throw new GFacHandlerException(e);
+//        } catch (Exception e) {
+//        	 try {
+// 				GFacUtils.saveErrorDetails(jobExecutionContext,  e.getCause().toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
+// 			} catch (GFacException e1) {
+// 				 log.error(e1.getLocalizedMessage());
+// 			}
+//        	throw new GFacHandlerException(e);
+//        }
+//    }
+//
+//    @Override
+//    public void recover(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
+//        // TODO: Auto generated method body.
+//    }
+//
+//
+//}

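For readers following the change: the AdvancedSCPOutputHandler above resolved its SCP target (user, host, path) from the configured outputDataDir URL and then built a per-run output directory from the experiment and task IDs. The following is a standalone, JDK-only sketch of that resolution. It is illustrative only: the address, experiment ID, and task ID are made-up values, and it uses java.net.URI rather than the handler's java.net.URL so that an scp/ssh-style address parses without a registered protocol handler.

    import java.io.File;
    import java.net.URI;

    public class OutputTargetSketch {
        public static void main(String[] args) {
            // Illustrative value; the real handler read this from
            // AdvancedOutputDataHandling.getOutputDataDir().
            URI target = URI.create("ssh://airavata@gw98.iu.xsede.org/home/airavata/outputData");

            String userName = target.getUserInfo();  // "airavata"
            String hostName = target.getHost();      // "gw98.iu.xsede.org"
            String outputPath = target.getPath();    // "/home/airavata/outputData"

            // Same per-run path construction as the handler:
            // <outputPath>/<experimentId>-<taskId>/
            String experimentId = "exp-123";  // illustrative
            String taskId = "task-456";       // illustrative
            String perRunPath = outputPath + File.separator + experimentId + "-" + taskId + File.separator;

            System.out.println(userName + "@" + hostName + ":" + perRunPath);
        }
    }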
http://git-wip-us.apache.org/repos/asf/airavata/blob/df3fbe6a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/NewSSHOutputHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/NewSSHOutputHandler.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/NewSSHOutputHandler.java
index 254b028..5dc9f2a 100644
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/NewSSHOutputHandler.java
+++ b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/NewSSHOutputHandler.java
@@ -1,78 +1,78 @@
-package org.apache.airavata.gfac.ssh.handler;
-
-import java.io.PrintWriter;
-import java.io.StringWriter;
-import java.util.List;
-import java.util.Properties;
-
-import org.apache.airavata.gfac.core.GFacException;
-import org.apache.airavata.gfac.core.cluster.RemoteCluster;
-import org.apache.airavata.gfac.core.context.JobExecutionContext;
-import org.apache.airavata.gfac.core.handler.AbstractHandler;
-import org.apache.airavata.gfac.core.handler.GFacHandlerException;
-import org.apache.airavata.gfac.core.provider.GFacProviderException;
-import org.apache.airavata.gfac.core.GFacUtils;
-import org.apache.airavata.gfac.ssh.security.SSHSecurityContext;
-import org.apache.airavata.gfac.ssh.util.GFACSSHUtils;
-import org.apache.airavata.gfac.ssh.util.HandleOutputs;
-import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;
-import org.apache.airavata.model.experiment.CorrectiveAction;
-import org.apache.airavata.model.experiment.ErrorCategory;
-import org.apache.airavata.registry.cpi.ExpCatChildDataType;
-import org.apache.airavata.registry.cpi.RegistryException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class NewSSHOutputHandler extends AbstractHandler{
-
-	 private static final Logger log = LoggerFactory.getLogger(NewSSHOutputHandler.class);
-
-	    public void invoke(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-	        String hostAddress = jobExecutionContext.getHostName();
-	      	RemoteCluster remoteCluster = null;
-	      	// Security Context and connection
-	        try {
-	            if (jobExecutionContext.getSecurityContext(hostAddress) == null) {
-	                GFACSSHUtils.addSecurityContext(jobExecutionContext);
-	            }
-	            remoteCluster = ((SSHSecurityContext) jobExecutionContext.getSecurityContext(hostAddress)).getRemoteCluster();
-	            if (remoteCluster == null) {
-	                throw new GFacProviderException("Security context is not set properly");
-	            } else {
-	                log.info("Successfully retrieved the Security Context");
-	            }
-	        } catch (Exception e) {
-	            log.error(e.getMessage());
-	            try {
-                    StringWriter errors = new StringWriter();
-                    e.printStackTrace(new PrintWriter(errors));
-	                GFacUtils.saveErrorDetails(jobExecutionContext,  errors.toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
-	            } catch (GFacException e1) {
-	                log.error(e1.getLocalizedMessage());
-	            }
-	            throw new GFacHandlerException("Error while creating SSHSecurityContext", e, e.getLocalizedMessage());
-	        }
-
-	        super.invoke(jobExecutionContext);
-	        List<OutputDataObjectType> outputArray =  HandleOutputs.handleOutputs(jobExecutionContext, remoteCluster);
-	        try {
-				experimentCatalog.add(ExpCatChildDataType.EXPERIMENT_OUTPUT, outputArray, jobExecutionContext.getExperimentID());
-			} catch (RegistryException e) {
-				throw new GFacHandlerException(e);
-			}
-
-	       
-	    }
-
-    @Override
-    public void recover(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-        // TODO: Auto generated method body.
-    }
-
-    @Override
-	public void initProperties(Properties properties) throws GFacHandlerException {
-		// TODO Auto-generated method stub
-		
-	}
-
-}
+//package org.apache.airavata.gfac.ssh.handler;
+//
+//import java.io.PrintWriter;
+//import java.io.StringWriter;
+//import java.util.List;
+//import java.util.Properties;
+//
+//import org.apache.airavata.gfac.core.GFacException;
+//import org.apache.airavata.gfac.core.cluster.RemoteCluster;
+//import org.apache.airavata.gfac.core.context.JobExecutionContext;
+//import org.apache.airavata.gfac.core.handler.AbstractHandler;
+//import org.apache.airavata.gfac.core.handler.GFacHandlerException;
+//import org.apache.airavata.gfac.core.provider.GFacProviderException;
+//import org.apache.airavata.gfac.core.GFacUtils;
+//import org.apache.airavata.gfac.ssh.security.SSHSecurityContext;
+//import org.apache.airavata.gfac.ssh.util.GFACSSHUtils;
+//import org.apache.airavata.gfac.ssh.util.HandleOutputs;
+//import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;
+//import org.apache.airavata.model.experiment.CorrectiveAction;
+//import org.apache.airavata.model.experiment.ErrorCategory;
+//import org.apache.airavata.registry.cpi.ExpCatChildDataType;
+//import org.apache.airavata.registry.cpi.RegistryException;
+//import org.slf4j.Logger;
+//import org.slf4j.LoggerFactory;
+//
+//public class NewSSHOutputHandler extends AbstractHandler{
+//
+//	 private static final Logger log = LoggerFactory.getLogger(NewSSHOutputHandler.class);
+//
+//	    public void invoke(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
+//	        String hostAddress = jobExecutionContext.getHostName();
+//	      	RemoteCluster remoteCluster = null;
+//	      	// Security Context and connection
+//	        try {
+//	            if (jobExecutionContext.getSecurityContext(hostAddress) == null) {
+//	                GFACSSHUtils.addSecurityContext(jobExecutionContext);
+//	            }
+//	            remoteCluster = ((SSHSecurityContext) jobExecutionContext.getSecurityContext(hostAddress)).getRemoteCluster();
+//	            if (remoteCluster == null) {
+//	                throw new GFacProviderException("Security context is not set properly");
+//	            } else {
+//	                log.info("Successfully retrieved the Security Context");
+//	            }
+//	        } catch (Exception e) {
+//	            log.error(e.getMessage());
+//	            try {
+//                    StringWriter errors = new StringWriter();
+//                    e.printStackTrace(new PrintWriter(errors));
+//	                GFacUtils.saveErrorDetails(jobExecutionContext,  errors.toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
+//	            } catch (GFacException e1) {
+//	                log.error(e1.getLocalizedMessage());
+//	            }
+//	            throw new GFacHandlerException("Error while creating SSHSecurityContext", e, e.getLocalizedMessage());
+//	        }
+//
+//	        super.invoke(jobExecutionContext);
+//	        List<OutputDataObjectType> outputArray =  HandleOutputs.handleOutputs(jobExecutionContext, remoteCluster);
+//	        try {
+//				experimentCatalog.add(ExpCatChildDataType.EXPERIMENT_OUTPUT, outputArray, jobExecutionContext.getExperimentID());
+//			} catch (RegistryException e) {
+//				throw new GFacHandlerException(e);
+//			}
+//
+//
+//	    }
+//
+//    @Override
+//    public void recover(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
+//        // TODO: Auto generated method body.
+//    }
+//
+//    @Override
+//	public void initProperties(Properties properties) throws GFacHandlerException {
+//		// TODO Auto-generated method stub
+//
+//	}
+//
+//}

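Both output handlers register staged files by stripping the directory prefix from the staged path before recording it against the output parameter. A minimal JDK-only sketch of that step, with an illustrative path; the second form using File.getName() is equivalent and a little clearer:

    import java.io.File;

    public class StagedFileNameSketch {
        public static void main(String[] args) {
            String downloadFile = "/tmp/airavata/outputData/result.dat";  // illustrative

            // Pattern used in the handlers above:
            String fileName = downloadFile.substring(downloadFile.lastIndexOf(File.separatorChar) + 1);

            // Equivalent JDK shorthand:
            String sameName = new File(downloadFile).getName();

            System.out.println(fileName + " == " + sameName);  // result.dat == result.dat
        }
    }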
http://git-wip-us.apache.org/repos/asf/airavata/blob/df3fbe6a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHDirectorySetupHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHDirectorySetupHandler.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHDirectorySetupHandler.java
index 1c7a7a6..d8afb06 100644
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHDirectorySetupHandler.java
+++ b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHDirectorySetupHandler.java
@@ -1,119 +1,119 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.ssh.handler;
-
-import org.apache.airavata.gfac.core.GFacException;
-import org.apache.airavata.gfac.core.cluster.RemoteCluster;
-import org.apache.airavata.gfac.core.context.JobExecutionContext;
-import org.apache.airavata.gfac.core.handler.AbstractHandler;
-import org.apache.airavata.gfac.core.handler.GFacHandlerException;
-import org.apache.airavata.gfac.core.GFacUtils;
-import org.apache.airavata.gfac.ssh.security.SSHSecurityContext;
-import org.apache.airavata.gfac.ssh.util.GFACSSHUtils;
-import org.apache.airavata.model.experiment.*;
-import org.apache.airavata.registry.cpi.ExpCatChildDataType;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.PrintWriter;
-import java.io.StringWriter;
-import java.util.Properties;
-
-public class SSHDirectorySetupHandler extends AbstractHandler {
-    private static final Logger log = LoggerFactory.getLogger(SSHDirectorySetupHandler.class);
-
-	public void invoke(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-        try {
-            String hostAddress = jobExecutionContext.getHostName();
-            if (jobExecutionContext.getSecurityContext(hostAddress) == null) {
-                GFACSSHUtils.addSecurityContext(jobExecutionContext);
-            }
-        } catch (Exception e) {
-            log.error(e.getMessage());
-            try {
-                StringWriter errors = new StringWriter();
-                e.printStackTrace(new PrintWriter(errors));
- 				GFacUtils.saveErrorDetails(jobExecutionContext,  errors.toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
- 			} catch (GFacException e1) {
- 				 log.error(e1.getLocalizedMessage());
- 			}
-            throw new GFacHandlerException("Error while creating SSHSecurityContext", e, e.getLocalizedMessage());
-        } 
-
-        log.info("Setup SSH job directorties");
-        super.invoke(jobExecutionContext);
-        makeDirectory(jobExecutionContext);
-
-	}
-
-    @Override
-    public void recover(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-        // TODO: Auto generated method body.
-    }
-
-    private void makeDirectory(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-		RemoteCluster remoteCluster = null;
-		try{
-            String hostAddress = jobExecutionContext.getHostName();
-            remoteCluster = ((SSHSecurityContext) jobExecutionContext.getSecurityContext(hostAddress)).getRemoteCluster();
-        if (remoteCluster == null) {
-            throw new GFacHandlerException("Security context is not set properly");
-        } else {
-            log.info("Successfully retrieved the Security Context");
-        }
-            String workingDirectory = jobExecutionContext.getWorkingDir();
-            remoteCluster.makeDirectory(workingDirectory);
-            if(!jobExecutionContext.getInputDir().equals(workingDirectory))
-            	remoteCluster.makeDirectory(jobExecutionContext.getInputDir());
-            if(!jobExecutionContext.getOutputDir().equals(workingDirectory))
-            	remoteCluster.makeDirectory(jobExecutionContext.getOutputDir());
-            
-            DataTransferDetails detail = new DataTransferDetails();
-            TransferStatus status = new TransferStatus();
-            status.setTransferState(TransferState.DIRECTORY_SETUP);
-            detail.setTransferStatus(status);
-            detail.setTransferDescription("Working directory = " + workingDirectory);
-
-            experimentCatalog.add(ExpCatChildDataType.DATA_TRANSFER_DETAIL, detail, jobExecutionContext.getTaskData().getTaskID());
-
-        } catch (Exception e) {
-			DataTransferDetails detail = new DataTransferDetails();
-            TransferStatus status = new TransferStatus();
-            status.setTransferState(TransferState.FAILED);
-            detail.setTransferStatus(status);
-            detail.setTransferDescription("Working directory = " + jobExecutionContext.getWorkingDir());
-            try {
-                experimentCatalog.add(ExpCatChildDataType.DATA_TRANSFER_DETAIL, detail, jobExecutionContext.getTaskData().getTaskID());
-                StringWriter errors = new StringWriter();
-                e.printStackTrace(new PrintWriter(errors));
-                GFacUtils.saveErrorDetails(jobExecutionContext,  errors.toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.FILE_SYSTEM_FAILURE);
-            } catch (Exception e1) {
-                throw new GFacHandlerException("Error persisting status", e1, e1.getLocalizedMessage());
-            }
-            throw new GFacHandlerException("Error executing the Handler: " + SSHDirectorySetupHandler.class, e);
-        }
-        
-	}
-
-    public void initProperties(Properties properties) throws GFacHandlerException {
-
-    }
-}
+///*
+// *
+// * Licensed to the Apache Software Foundation (ASF) under one
+// * or more contributor license agreements.  See the NOTICE file
+// * distributed with this work for additional information
+// * regarding copyright ownership.  The ASF licenses this file
+// * to you under the Apache License, Version 2.0 (the
+// * "License"); you may not use this file except in compliance
+// * with the License.  You may obtain a copy of the License at
+// *
+// *   http://www.apache.org/licenses/LICENSE-2.0
+// *
+// * Unless required by applicable law or agreed to in writing,
+// * software distributed under the License is distributed on an
+// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// * KIND, either express or implied.  See the License for the
+// * specific language governing permissions and limitations
+// * under the License.
+// *
+//*/
+//package org.apache.airavata.gfac.ssh.handler;
+//
+//import org.apache.airavata.gfac.core.GFacException;
+//import org.apache.airavata.gfac.core.cluster.RemoteCluster;
+//import org.apache.airavata.gfac.core.context.JobExecutionContext;
+//import org.apache.airavata.gfac.core.handler.AbstractHandler;
+//import org.apache.airavata.gfac.core.handler.GFacHandlerException;
+//import org.apache.airavata.gfac.core.GFacUtils;
+//import org.apache.airavata.gfac.ssh.security.SSHSecurityContext;
+//import org.apache.airavata.gfac.ssh.util.GFACSSHUtils;
+//import org.apache.airavata.model.experiment.*;
+//import org.apache.airavata.registry.cpi.ExpCatChildDataType;
+//import org.slf4j.Logger;
+//import org.slf4j.LoggerFactory;
+//
+//import java.io.PrintWriter;
+//import java.io.StringWriter;
+//import java.util.Properties;
+//
+//public class SSHDirectorySetupHandler extends AbstractHandler {
+//    private static final Logger log = LoggerFactory.getLogger(SSHDirectorySetupHandler.class);
+//
+//	public void invoke(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
+//        try {
+//            String hostAddress = jobExecutionContext.getHostName();
+//            if (jobExecutionContext.getSecurityContext(hostAddress) == null) {
+//                GFACSSHUtils.addSecurityContext(jobExecutionContext);
+//            }
+//        } catch (Exception e) {
+//            log.error(e.getMessage());
+//            try {
+//                StringWriter errors = new StringWriter();
+//                e.printStackTrace(new PrintWriter(errors));
+// 				GFacUtils.saveErrorDetails(jobExecutionContext,  errors.toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
+// 			} catch (GFacException e1) {
+// 				 log.error(e1.getLocalizedMessage());
+// 			}
+//            throw new GFacHandlerException("Error while creating SSHSecurityContext", e, e.getLocalizedMessage());
+//        }
+//
+//        log.info("Setup SSH job directorties");
+//        super.invoke(jobExecutionContext);
+//        makeDirectory(jobExecutionContext);
+//
+//	}
+//
+//    @Override
+//    public void recover(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
+//        // TODO: Auto generated method body.
+//    }
+//
+//    private void makeDirectory(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
+//		RemoteCluster remoteCluster = null;
+//		try{
+//            String hostAddress = jobExecutionContext.getHostName();
+//            remoteCluster = ((SSHSecurityContext) jobExecutionContext.getSecurityContext(hostAddress)).getRemoteCluster();
+//        if (remoteCluster == null) {
+//            throw new GFacHandlerException("Security context is not set properly");
+//        } else {
+//            log.info("Successfully retrieved the Security Context");
+//        }
+//            String workingDirectory = jobExecutionContext.getWorkingDir();
+//            remoteCluster.makeDirectory(workingDirectory);
+//            if(!jobExecutionContext.getInputDir().equals(workingDirectory))
+//            	remoteCluster.makeDirectory(jobExecutionContext.getInputDir());
+//            if(!jobExecutionContext.getOutputDir().equals(workingDirectory))
+//            	remoteCluster.makeDirectory(jobExecutionContext.getOutputDir());
+//
+//            DataTransferDetails detail = new DataTransferDetails();
+//            TransferStatus status = new TransferStatus();
+//            status.setTransferState(TransferState.DIRECTORY_SETUP);
+//            detail.setTransferStatus(status);
+//            detail.setTransferDescription("Working directory = " + workingDirectory);
+//
+//            experimentCatalog.add(ExpCatChildDataType.DATA_TRANSFER_DETAIL, detail, jobExecutionContext.getTaskData().getTaskID());
+//
+//        } catch (Exception e) {
+//			DataTransferDetails detail = new DataTransferDetails();
+//            TransferStatus status = new TransferStatus();
+//            status.setTransferState(TransferState.FAILED);
+//            detail.setTransferStatus(status);
+//            detail.setTransferDescription("Working directory = " + jobExecutionContext.getWorkingDir());
+//            try {
+//                experimentCatalog.add(ExpCatChildDataType.DATA_TRANSFER_DETAIL, detail, jobExecutionContext.getTaskData().getTaskID());
+//                StringWriter errors = new StringWriter();
+//                e.printStackTrace(new PrintWriter(errors));
+//                GFacUtils.saveErrorDetails(jobExecutionContext,  errors.toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.FILE_SYSTEM_FAILURE);
+//            } catch (Exception e1) {
+//                throw new GFacHandlerException("Error persisting status", e1, e1.getLocalizedMessage());
+//            }
+//            throw new GFacHandlerException("Error executing the Handler: " + SSHDirectorySetupHandler.class, e);
+//        }
+//
+//	}
+//
+//    public void initProperties(Properties properties) throws GFacHandlerException {
+//
+//    }
+//}

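The SSHDirectorySetupHandler above creates the job's working directory and then the input and output directories, skipping each of the latter when it is the same path as the working directory. A local, JDK-only sketch of that guard, assuming illustrative paths; the real handler performed these operations on the remote cluster over SSH via RemoteCluster.makeDirectory(...):

    import java.io.IOException;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;

    public class DirectorySetupSketch {
        public static void main(String[] args) throws IOException {
            // Illustrative local paths standing in for the remote job directories.
            Path workingDir = Paths.get("/tmp/airavata-demo/work");
            Path inputDir   = Paths.get("/tmp/airavata-demo/work/input");
            Path outputDir  = Paths.get("/tmp/airavata-demo/work/output");

            Files.createDirectories(workingDir);

            // Same guard as the handler: only create input/output directories
            // when they differ from the working directory itself.
            if (!inputDir.equals(workingDir)) {
                Files.createDirectories(inputDir);
            }
            if (!outputDir.equals(workingDir)) {
                Files.createDirectories(outputDir);
            }

            System.out.println("Created " + workingDir + ", " + inputDir + ", " + outputDir);
        }
    }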
http://git-wip-us.apache.org/repos/asf/airavata/blob/df3fbe6a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHInputHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHInputHandler.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHInputHandler.java
index 86584ca..b1e485a 100644
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHInputHandler.java
+++ b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHInputHandler.java
@@ -1,198 +1,198 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.ssh.handler;
-
-import org.apache.airavata.common.exception.ApplicationSettingsException;
-import org.apache.airavata.gfac.core.GFacException;
-import org.apache.airavata.gfac.core.cluster.RemoteCluster;
-import org.apache.airavata.gfac.core.context.JobExecutionContext;
-import org.apache.airavata.gfac.core.context.MessageContext;
-import org.apache.airavata.gfac.core.handler.AbstractHandler;
-import org.apache.airavata.gfac.core.handler.GFacHandlerException;
-import org.apache.airavata.gfac.core.GFacUtils;
-import org.apache.airavata.gfac.ssh.security.SSHSecurityContext;
-import org.apache.airavata.gfac.ssh.util.GFACSSHUtils;
-import org.apache.airavata.model.appcatalog.appinterface.DataType;
-import org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType;
-import org.apache.airavata.model.experiment.*;
-import org.apache.airavata.registry.cpi.ExpCatChildDataType;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.File;
-import java.io.IOException;
-import java.io.PrintWriter;
-import java.io.StringWriter;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Properties;
-import java.util.Set;
-
-public class SSHInputHandler extends AbstractHandler {
-
-    private static final Logger log = LoggerFactory.getLogger(SSHInputHandler.class);
-
-
-    public void invoke(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-        DataTransferDetails detail = new DataTransferDetails();
-        detail.setTransferDescription("Input Data Staging");
-        TransferStatus status = new TransferStatus();
-        int index = 0;
-        int oldIndex = 0;
-        List<String> oldFiles = new ArrayList<String>();
-        StringBuffer data = new StringBuffer("|");
-        MessageContext inputNew = new MessageContext();
-        RemoteCluster remoteCluster = null;
-        
-        try {
-            String hostAddress = jobExecutionContext.getHostName();
-            if (jobExecutionContext.getSecurityContext(hostAddress) == null) {
-                try {
-                    GFACSSHUtils.addSecurityContext(jobExecutionContext);
-                } catch (ApplicationSettingsException e) {
-                    log.error(e.getMessage());
-                    try {
-                        StringWriter errors = new StringWriter();
-                        e.printStackTrace(new PrintWriter(errors));
-         				GFacUtils.saveErrorDetails(jobExecutionContext,  errors.toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
-         			} catch (GFacException e1) {
-         				 log.error(e1.getLocalizedMessage());
-         			}
-                    throw new GFacHandlerException("Error while creating SSHSecurityContext", e, e.getLocalizedMessage());
-                }
-            }
-
-            remoteCluster = ((SSHSecurityContext) jobExecutionContext.getSecurityContext(hostAddress)).getRemoteCluster();
-            if (remoteCluster == null) {
-                throw new GFacException("Security context is not set properly");
-            } else {
-                log.info("Successfully retrieved the Security Context");
-            }
-            log.info("Invoking SCPInputHandler");
-            super.invoke(jobExecutionContext);
-
-
-            MessageContext input = jobExecutionContext.getInMessageContext();
-            Set<String> parameters = input.getParameters().keySet();
-            for (String paramName : parameters) {
-                InputDataObjectType inputParamType = (InputDataObjectType) input.getParameters().get(paramName);
-                String paramValue = inputParamType.getValue();
-                //TODO: Review this with type
-                if (inputParamType.getType() == DataType.URI) {
-                    if (index < oldIndex) {
-                        log.info("Input File: " + paramValue + " is already transfered, so we skip this operation !!!");
-                        inputParamType.setValue(oldFiles.get(index));
-                        data.append(oldFiles.get(index++)).append(","); // we get already transfered file and increment the index
-                    } else {
-                        String stageInputFile = stageInputFiles(remoteCluster, jobExecutionContext, paramValue);
-                        inputParamType.setValue(stageInputFile);
-                        StringBuffer temp = new StringBuffer(data.append(stageInputFile).append(",").toString());
-                        status.setTransferState(TransferState.UPLOAD);
-                        detail.setTransferStatus(status);
-                        detail.setTransferDescription("Input Data Staged: " + stageInputFile);
-                        experimentCatalog.add(ExpCatChildDataType.DATA_TRANSFER_DETAIL, detail, jobExecutionContext.getTaskData().getTaskID());
-
-                        GFacUtils.saveHandlerData(jobExecutionContext, temp.insert(0, ++index), this.getClass().getName());
-                    }
-                }// FIXME: what is the thrift model DataType equivalent for URIArray type?
-//                else if ("URIArray".equals(actualParameter.getType().getType().toString())) {
-//                	if (index < oldIndex) {
+///*
+// *
+// * Licensed to the Apache Software Foundation (ASF) under one
+// * or more contributor license agreements.  See the NOTICE file
+// * distributed with this work for additional information
+// * regarding copyright ownership.  The ASF licenses this file
+// * to you under the Apache License, Version 2.0 (the
+// * "License"); you may not use this file except in compliance
+// * with the License.  You may obtain a copy of the License at
+// *
+// *   http://www.apache.org/licenses/LICENSE-2.0
+// *
+// * Unless required by applicable law or agreed to in writing,
+// * software distributed under the License is distributed on an
+// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// * KIND, either express or implied.  See the License for the
+// * specific language governing permissions and limitations
+// * under the License.
+// *
+//*/
+//package org.apache.airavata.gfac.ssh.handler;
+//
+//import org.apache.airavata.common.exception.ApplicationSettingsException;
+//import org.apache.airavata.gfac.core.GFacException;
+//import org.apache.airavata.gfac.core.cluster.RemoteCluster;
+//import org.apache.airavata.gfac.core.context.JobExecutionContext;
+//import org.apache.airavata.gfac.core.context.MessageContext;
+//import org.apache.airavata.gfac.core.handler.AbstractHandler;
+//import org.apache.airavata.gfac.core.handler.GFacHandlerException;
+//import org.apache.airavata.gfac.core.GFacUtils;
+//import org.apache.airavata.gfac.ssh.security.SSHSecurityContext;
+//import org.apache.airavata.gfac.ssh.util.GFACSSHUtils;
+//import org.apache.airavata.model.appcatalog.appinterface.DataType;
+//import org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType;
+//import org.apache.airavata.model.experiment.*;
+//import org.apache.airavata.registry.cpi.ExpCatChildDataType;
+//import org.slf4j.Logger;
+//import org.slf4j.LoggerFactory;
+//
+//import java.io.File;
+//import java.io.IOException;
+//import java.io.PrintWriter;
+//import java.io.StringWriter;
+//import java.util.ArrayList;
+//import java.util.List;
+//import java.util.Properties;
+//import java.util.Set;
+//
+//public class SSHInputHandler extends AbstractHandler {
+//
+//    private static final Logger log = LoggerFactory.getLogger(SSHInputHandler.class);
+//
+//
+//    public void invoke(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
+//        DataTransferDetails detail = new DataTransferDetails();
+//        detail.setTransferDescription("Input Data Staging");
+//        TransferStatus status = new TransferStatus();
+//        int index = 0;
+//        int oldIndex = 0;
+//        List<String> oldFiles = new ArrayList<String>();
+//        StringBuffer data = new StringBuffer("|");
+//        MessageContext inputNew = new MessageContext();
+//        RemoteCluster remoteCluster = null;
+//
+//        try {
+//            String hostAddress = jobExecutionContext.getHostName();
+//            if (jobExecutionContext.getSecurityContext(hostAddress) == null) {
+//                try {
+//                    GFACSSHUtils.addSecurityContext(jobExecutionContext);
+//                } catch (ApplicationSettingsException e) {
+//                    log.error(e.getMessage());
+//                    try {
+//                        StringWriter errors = new StringWriter();
+//                        e.printStackTrace(new PrintWriter(errors));
+//         				GFacUtils.saveErrorDetails(jobExecutionContext,  errors.toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
+//         			} catch (GFacException e1) {
+//         				 log.error(e1.getLocalizedMessage());
+//         			}
+//                    throw new GFacHandlerException("Error while creating SSHSecurityContext", e, e.getLocalizedMessage());
+//                }
+//            }
+//
+//            remoteCluster = ((SSHSecurityContext) jobExecutionContext.getSecurityContext(hostAddress)).getRemoteCluster();
+//            if (remoteCluster == null) {
+//                throw new GFacException("Security context is not set properly");
+//            } else {
+//                log.info("Successfully retrieved the Security Context");
+//            }
+//            log.info("Invoking SCPInputHandler");
+//            super.invoke(jobExecutionContext);
+//
+//
+//            MessageContext input = jobExecutionContext.getInMessageContext();
+//            Set<String> parameters = input.getParameters().keySet();
+//            for (String paramName : parameters) {
+//                InputDataObjectType inputParamType = (InputDataObjectType) input.getParameters().get(paramName);
+//                String paramValue = inputParamType.getValue();
+//                //TODO: Review this with type
+//                if (inputParamType.getType() == DataType.URI) {
+//                    if (index < oldIndex) {
 //                        log.info("Input File: " + paramValue + " is already transfered, so we skip this operation !!!");
-//                        ((URIParameterType) actualParameter.getType()).setValue(oldFiles.get(index));
+//                        inputParamType.setValue(oldFiles.get(index));
 //                        data.append(oldFiles.get(index++)).append(","); // we get already transfered file and increment the index
-//                    }else{
-//                	List<String> split = Arrays.asList(StringUtil.getElementsFromString(paramValue));
-//                    List<String> newFiles = new ArrayList<String>();
-//                    for (String paramValueEach : split) {
-//                        String stageInputFiles = stageInputFiles(remoteCluster,jobExecutionContext, paramValueEach);
+//                    } else {
+//                        String stageInputFile = stageInputFiles(remoteCluster, jobExecutionContext, paramValue);
+//                        inputParamType.setValue(stageInputFile);
+//                        StringBuffer temp = new StringBuffer(data.append(stageInputFile).append(",").toString());
 //                        status.setTransferState(TransferState.UPLOAD);
 //                        detail.setTransferStatus(status);
-//                        detail.setTransferDescription("Input Data Staged: " + stageInputFiles);
-//                        registry.add(ChildDataType.DATA_TRANSFER_DETAIL, detail, jobExecutionContext.getTaskData().getTaskID());
-//                        newFiles.add(stageInputFiles);
-//                        StringBuffer temp = new StringBuffer(data.append(stageInputFiles).append(",").toString());
-//                        GFacUtils.savePluginData(jobExecutionContext, temp.insert(0, ++index), this.getClass().getName());
-//                    }
-//                    ((URIArrayType) actualParameter.getType()).setValueArray(newFiles.toArray(new String[newFiles.size()]));
+//                        detail.setTransferDescription("Input Data Staged: " + stageInputFile);
+//                        experimentCatalog.add(ExpCatChildDataType.DATA_TRANSFER_DETAIL, detail, jobExecutionContext.getTaskData().getTaskID());
+//
+//                        GFacUtils.saveHandlerData(jobExecutionContext, temp.insert(0, ++index), this.getClass().getName());
 //                    }
-//                }
-                inputNew.getParameters().put(paramName, inputParamType);
-            }
-        } catch (Exception e) {
-            log.error(e.getMessage());
-            status.setTransferState(TransferState.FAILED);
-            detail.setTransferStatus(status);
-            try {
-                StringWriter errors = new StringWriter();
-                e.printStackTrace(new PrintWriter(errors));
-                GFacUtils.saveErrorDetails(jobExecutionContext, errors.toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.FILE_SYSTEM_FAILURE);
-                experimentCatalog.add(ExpCatChildDataType.DATA_TRANSFER_DETAIL, detail, jobExecutionContext.getTaskData().getTaskID());
-            } catch (Exception e1) {
-                throw new GFacHandlerException("Error persisting status", e1, e1.getLocalizedMessage());
-            }
-            throw new GFacHandlerException("Error while input File Staging", e, e.getLocalizedMessage());
-        }
-        jobExecutionContext.setInMessageContext(inputNew);
-    }
-
-    @Override
-    public void recover(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-        // TODO: Auto generated method body.
-    }
-
-    private static String stageInputFiles(RemoteCluster remoteCluster, JobExecutionContext jobExecutionContext, String paramValue) throws IOException, GFacException {
-        int i = paramValue.lastIndexOf(File.separator);
-        String substring = paramValue.substring(i + 1);
-        try {
-            String targetFile = jobExecutionContext.getInputDir() + File.separator + substring;
-            if(paramValue.startsWith("scp:")){
-            	paramValue = paramValue.substring(paramValue.indexOf(":") + 1, paramValue.length());
-            	remoteCluster.scpThirdParty(paramValue, targetFile);
-            }else{
-            if(paramValue.startsWith("file")){
-                paramValue = paramValue.substring(paramValue.indexOf(":") + 1, paramValue.length());
-            }
-            boolean success = false;
-            int j = 1;
-            while(!success){
-            try {
-				remoteCluster.scpTo(targetFile, paramValue);
-				success = true;
-			} catch (Exception e) {
-				log.info(e.getLocalizedMessage());
-				Thread.sleep(2000);
-				 if(j==3) {
-					throw new GFacHandlerException("Error while input File Staging", e, e.getLocalizedMessage());
-				 }
-            }
-            j++;
-            }
-            }
-            return targetFile;
-        } catch (Exception e) {
-            throw new GFacHandlerException("Error while input File Staging", e, e.getLocalizedMessage());
-        }
-    }
-
-    public void initProperties(Properties properties) throws GFacHandlerException {
-
-    }
-}
+//                }// FIXME: what is the thrift model DataType equivalent for URIArray type?
+////                else if ("URIArray".equals(actualParameter.getType().getType().toString())) {
+////                	if (index < oldIndex) {
+////                        log.info("Input File: " + paramValue + " is already transfered, so we skip this operation !!!");
+////                        ((URIParameterType) actualParameter.getType()).setValue(oldFiles.get(index));
+////                        data.append(oldFiles.get(index++)).append(","); // we get already transfered file and increment the index
+////                    }else{
+////                	List<String> split = Arrays.asList(StringUtil.getElementsFromString(paramValue));
+////                    List<String> newFiles = new ArrayList<String>();
+////                    for (String paramValueEach : split) {
+////                        String stageInputFiles = stageInputFiles(remoteCluster,jobExecutionContext, paramValueEach);
+////                        status.setTransferState(TransferState.UPLOAD);
+////                        detail.setTransferStatus(status);
+////                        detail.setTransferDescription("Input Data Staged: " + stageInputFiles);
+////                        registry.add(ChildDataType.DATA_TRANSFER_DETAIL, detail, jobExecutionContext.getTaskData().getTaskID());
+////                        newFiles.add(stageInputFiles);
+////                        StringBuffer temp = new StringBuffer(data.append(stageInputFiles).append(",").toString());
+////                        GFacUtils.savePluginData(jobExecutionContext, temp.insert(0, ++index), this.getClass().getName());
+////                    }
+////                    ((URIArrayType) actualParameter.getType()).setValueArray(newFiles.toArray(new String[newFiles.size()]));
+////                    }
+////                }
+//                inputNew.getParameters().put(paramName, inputParamType);
+//            }
+//        } catch (Exception e) {
+//            log.error(e.getMessage());
+//            status.setTransferState(TransferState.FAILED);
+//            detail.setTransferStatus(status);
+//            try {
+//                StringWriter errors = new StringWriter();
+//                e.printStackTrace(new PrintWriter(errors));
+//                GFacUtils.saveErrorDetails(jobExecutionContext, errors.toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.FILE_SYSTEM_FAILURE);
+//                experimentCatalog.add(ExpCatChildDataType.DATA_TRANSFER_DETAIL, detail, jobExecutionContext.getTaskData().getTaskID());
+//            } catch (Exception e1) {
+//                throw new GFacHandlerException("Error persisting status", e1, e1.getLocalizedMessage());
+//            }
+//            throw new GFacHandlerException("Error while input File Staging", e, e.getLocalizedMessage());
+//        }
+//        jobExecutionContext.setInMessageContext(inputNew);
+//    }
+//
+//    @Override
+//    public void recover(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
+//        // TODO: Auto generated method body.
+//    }
+//
+//    private static String stageInputFiles(RemoteCluster remoteCluster, JobExecutionContext jobExecutionContext, String paramValue) throws IOException, GFacException {
+//        int i = paramValue.lastIndexOf(File.separator);
+//        String substring = paramValue.substring(i + 1);
+//        try {
+//            String targetFile = jobExecutionContext.getInputDir() + File.separator + substring;
+//            if(paramValue.startsWith("scp:")){
+//            	paramValue = paramValue.substring(paramValue.indexOf(":") + 1, paramValue.length());
+//            	remoteCluster.scpThirdParty(paramValue, targetFile);
+//            }else{
+//            if(paramValue.startsWith("file")){
+//                paramValue = paramValue.substring(paramValue.indexOf(":") + 1, paramValue.length());
+//            }
+//            boolean success = false;
+//            int j = 1;
+//            while(!success){
+//            try {
+//				remoteCluster.scpTo(targetFile, paramValue);
+//				success = true;
+//			} catch (Exception e) {
+//				log.info(e.getLocalizedMessage());
+//				Thread.sleep(2000);
+//				 if(j==3) {
+//					throw new GFacHandlerException("Error while input File Staging", e, e.getLocalizedMessage());
+//				 }
+//            }
+//            j++;
+//            }
+//            }
+//            return targetFile;
+//        } catch (Exception e) {
+//            throw new GFacHandlerException("Error while input File Staging", e, e.getLocalizedMessage());
+//        }
+//    }
+//
+//    public void initProperties(Properties properties) throws GFacHandlerException {
+//
+//    }
+//}
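Note on the hunk above: it comments out SSHInputHandler rather than deleting it, and the staging logic it carried is easy to lose behind the // prefixes. As a readability aid only, here is a minimal sketch of what stageInputFiles does (strip an scp:/file: URI prefix, copy the file into the job's input directory, retry the copy up to three times with a two-second pause). The Transfer interface and the stageInputFile name below are stand-ins invented for this example so it compiles on its own; they are not Airavata APIs, and the real handler uses RemoteCluster.scpTo/scpThirdParty as shown in the diff.

import java.io.File;

public class InputStagingSketch {

    /** Hypothetical stand-in for the RemoteCluster copy calls used by the handler. */
    interface Transfer {
        void scpTo(String remoteFile, String localFile) throws Exception;
        void scpThirdParty(String sourceUri, String targetFile) throws Exception;
    }

    static String stageInputFile(Transfer transfer, String inputDir, String paramValue) throws Exception {
        // Derive the target file name inside the job's input directory, as the handler does.
        String fileName = paramValue.substring(paramValue.lastIndexOf(File.separator) + 1);
        String targetFile = inputDir + File.separator + fileName;

        if (paramValue.startsWith("scp:")) {
            // Third-party copy between two remote endpoints; no retry in the original either.
            transfer.scpThirdParty(paramValue.substring(paramValue.indexOf(':') + 1), targetFile);
            return targetFile;
        }
        if (paramValue.startsWith("file")) {
            // Strip the file: prefix so a plain local path is left.
            paramValue = paramValue.substring(paramValue.indexOf(':') + 1);
        }

        // Up to three attempts with a 2 s pause, mirroring the handler's while(!success) loop.
        Exception last = null;
        for (int attempt = 1; attempt <= 3; attempt++) {
            try {
                transfer.scpTo(targetFile, paramValue);
                return targetFile;
            } catch (Exception e) {
                last = e;
                Thread.sleep(2000);
            }
        }
        throw new Exception("Error while input File Staging", last);
    }
}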

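A second readability note: invoke() records staging progress as "index|file1,file2," via GFacUtils.saveHandlerData, and the index < oldIndex branch suggests a recovering run is meant to read that record back and skip files already transferred, although the code shown only writes it (oldIndex/oldFiles are never populated here). The sketch below is an assumption about that intent, not the handler's actual recovery path; the method names are invented for illustration.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class StagingProgressSketch {

    /** Encode progress after staging files, e.g. encode(2, [a.dat, b.dat]) -> "2|a.dat,b.dat,". */
    static String encode(int index, List<String> stagedFiles) {
        StringBuilder sb = new StringBuilder(index + "|");
        for (String f : stagedFiles) {
            sb.append(f).append(",");
        }
        return sb.toString();
    }

    /** Decode a saved record back into oldIndex and oldFiles for the index < oldIndex check. */
    static List<String> decode(String record, int[] oldIndexOut) {
        int bar = record.indexOf('|');
        oldIndexOut[0] = Integer.parseInt(record.substring(0, bar));
        String files = record.substring(bar + 1);
        return files.isEmpty() ? new ArrayList<>() : new ArrayList<>(Arrays.asList(files.split(",")));
    }
}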
