airavata-commits mailing list archives

From lah...@apache.org
Subject [1/2] airavata git commit: fixing the recovery for SSHProvider
Date Wed, 22 Apr 2015 21:20:20 GMT
Repository: airavata
Updated Branches:
  refs/heads/master bbb43ffb4 -> ab070e179


fixing the recovery for SSHProvider


Project: http://git-wip-us.apache.org/repos/asf/airavata/repo
Commit: http://git-wip-us.apache.org/repos/asf/airavata/commit/6409f076
Tree: http://git-wip-us.apache.org/repos/asf/airavata/tree/6409f076
Diff: http://git-wip-us.apache.org/repos/asf/airavata/diff/6409f076

Branch: refs/heads/master
Commit: 6409f076481c182fbcf22058f1a71b196599395b
Parents: 4a043be
Author: Lahiru Gunathilake <glahiru@gmail.com>
Authored: Wed Apr 22 17:19:33 2015 -0400
Committer: Lahiru Gunathilake <glahiru@gmail.com>
Committed: Wed Apr 22 17:19:33 2015 -0400

----------------------------------------------------------------------
 .../client/samples/CreateLaunchExperiment.java  |   4 +-
 .../airavata/gfac/core/cpi/BetterGfacImpl.java  |   3 +-
 .../core/monitor/GfacInternalStatusUpdator.java |   4 +-
 .../airavata/gfac/core/utils/GFacUtils.java     |  76 +++++++-------
 .../gfac/ssh/provider/impl/SSHProvider.java     | 101 ++++++++++++++++---
 5 files changed, 134 insertions(+), 54 deletions(-)
----------------------------------------------------------------------
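
For context on the fix: the new recovery path in SSHProvider persists a small "jobDesc=<xml>,jobId=<id>" string through GFacUtils.savePluginData() when a job is submitted, and recover() later reads it back with GFacUtils.getPluginData(), splits it on "," and strips the key prefixes. A minimal sketch of that round trip (illustrative only; RecoveryDataSketch is a hypothetical class, and the real code builds the XML from a JobDescriptor and stores the string in ZooKeeper):

public class RecoveryDataSketch {                                  // hypothetical, for illustration
    static String encode(String jobDescXml, String jobId) {
        StringBuffer data = new StringBuffer();
        data.append("jobDesc=").append(jobDescXml);                // same keys the diff appends
        data.append(",jobId=").append(jobId);
        return data.toString();
    }

    static String[] decode(String pluginData) {
        String[] split = pluginData.split(",");
        if (split.length < 2) {                                    // recover() re-submits the job in this case
            return null;
        }
        String jobDesc = split[0].substring("jobDesc=".length());  // substring(8) in the diff
        String jobId = split[1].substring("jobId=".length());      // substring(6) in the diff
        return new String[]{jobDesc, jobId};
    }

    public static void main(String[] args) {
        String saved = encode("<jobDescriptor/>", "12345");
        String[] recovered = decode(saved);
        System.out.println("jobDesc: " + recovered[0] + ", jobId: " + recovered[1]);
    }
}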


http://git-wip-us.apache.org/repos/asf/airavata/blob/6409f076/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/samples/CreateLaunchExperiment.java
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/samples/CreateLaunchExperiment.java b/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/samples/CreateLaunchExperiment.java
index 6cac71e..08a725e 100644
--- a/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/samples/CreateLaunchExperiment.java
+++ b/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/samples/CreateLaunchExperiment.java
@@ -166,8 +166,8 @@ public class CreateLaunchExperiment {
 //                final String expId = createExperimentForSSHHost(airavata);
 //                final String expId = createEchoExperimentForFSD(airavataClient);
 //                final String expId = createMPIExperimentForFSD(airavataClient);
-//               final String expId = createEchoExperimentForStampede(airavataClient);
-                final String expId = createEchoExperimentForTrestles(airavataClient);
+               final String expId = createEchoExperimentForStampede(airavataClient);
+//                final String expId = createEchoExperimentForTrestles(airavataClient);
 //                final String expId = createExperimentEchoForLocalHost(airavataClient);
 //                final String expId = createExperimentWRFTrestles(airavataClient);
 //                final String expId = createExperimentForBR2(airavataClient);

http://git-wip-us.apache.org/repos/asf/airavata/blob/6409f076/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java
index 130f829..a6df8d5 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java
@@ -662,6 +662,7 @@ public class BetterGfacImpl implements GFac,Watcher {
 				log.info("ExperimentId: " + experimentID + " taskId: " + jobExecutionContext.getTaskData().getTaskID());
 			}
 		} catch (Exception e) {
+            log.error(e.getMessage(),e);
 			try {
 				// we make the experiment as failed due to exception scenario
 				monitorPublisher.publish(new GfacExperimentStateChangeRequest(new MonitorID(jobExecutionContext), GfacExperimentState.FAILED));
@@ -796,7 +797,7 @@ public class BetterGfacImpl implements GFac,Watcher {
         if (provider != null) {
             monitorPublisher.publish(new GfacExperimentStateChangeRequest(new MonitorID(jobExecutionContext), GfacExperimentState.PROVIDERINVOKING));
             String plState = GFacUtils.getPluginState(zk, jobExecutionContext, provider.getClass().getName());
-            if (Integer.valueOf(plState) >= GfacPluginState.INVOKED.getValue()) {    // this will make sure if a plugin crashes it will not launch from the scratch, but plugins have to save their invoked state
+            if (plState!=null && Integer.valueOf(plState) >= GfacPluginState.INVOKED.getValue()) {    // this will make sure if a plugin crashes it will not launch from the scratch, but plugins have to save their invoked state
                 if (provider instanceof GFacRecoverableProvider) {
                     GFacUtils.createPluginZnode(zk, jobExecutionContext, provider.getClass().getName());
                     ((GFacRecoverableProvider) provider).recover(jobExecutionContext);

http://git-wip-us.apache.org/repos/asf/airavata/blob/6409f076/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/monitor/GfacInternalStatusUpdator.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/monitor/GfacInternalStatusUpdator.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/monitor/GfacInternalStatusUpdator.java
index d03237e..c0c9dd3 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/monitor/GfacInternalStatusUpdator.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/monitor/GfacInternalStatusUpdator.java
@@ -53,7 +53,7 @@ public class GfacInternalStatusUpdator implements AbstractActivityListener, Watc
         MonitorID monitorID = statusChangeRequest.getMonitorID();
         String experimentNode = ServerSettings.getSetting(Constants.ZOOKEEPER_GFAC_EXPERIMENT_NODE, "/gfac-experiments");
         String experimentPath = experimentNode + File.separator + ServerSettings.getSetting(Constants.ZOOKEEPER_GFAC_SERVER_NAME)
-                + File.separator + statusChangeRequest.getMonitorID().getExperimentID() + "+" + monitorID.getTaskID();
+                + File.separator + statusChangeRequest.getMonitorID().getExperimentID();
         Stat exists = null;
         try {
             if (!zk.getState().isConnected()) {
@@ -63,7 +63,7 @@ public class GfacInternalStatusUpdator implements AbstractActivityListener, Watc
                     mutex.wait();
                 }
             }
-            exists = zk.exists(experimentPath, false);// this znode is created by orchestrator so it has to exist at this level
+            exists = zk.exists(experimentPath, false);
             if (exists == null) {
                 logger.error("ZK path: " + experimentPath + " does not exists !!");
                 logger.error("Zookeeper is in an inconsistent state !!! ");

http://git-wip-us.apache.org/repos/asf/airavata/blob/6409f076/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/utils/GFacUtils.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/utils/GFacUtils.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/utils/GFacUtils.java
index 7ed1a38..054e0c3 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/utils/GFacUtils.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/utils/GFacUtils.java
@@ -1164,10 +1164,7 @@ public class GFacUtils {
 		String newExpNode = experimentPath + File.separator + experimentID;
 		Stat exists1 = zk.exists(newExpNode, false);
 		String experimentEntry = GFacUtils.findExperimentEntry(experimentID, zk);
-		if (exists1 != null) {
-			log.error("This request is wrong because its already running in the same instance");
-			return false;
-		} else if (experimentEntry == null) {  // this means this is a very new experiment
+		if (experimentEntry == null) {  // this means this is a very new experiment
 			// are going to create a new node
 			log.info("This is a new Job, so creating all the experiment docs from the scratch");
 
@@ -1184,46 +1181,57 @@ public class GFacUtils {
 							.valueOf(GfacExperimentState.LAUNCHED.getValue())
 							.getBytes(), ZooDefs.Ids.OPEN_ACL_UNSAFE,
 					CreateMode.PERSISTENT);
+
+			if(zk.exists(s,false)!=null){
+				log.info("Created the node: "+s+" successfully !");
+			}else{
+				log.error("Error creating node: "+s+" successfully !");
+			}
+
 			String s1 = zk.create(newExpNode + File.separator + "operation", "submit".getBytes(), ZooDefs.Ids.OPEN_ACL_UNSAFE,
 					CreateMode.PERSISTENT);
 			zk.exists(s1, true);// we want to know when this node get deleted
-			String s2 = zk.create(newExpNode + AiravataZKUtils.DELIVERY_TAG_POSTFIX, longToBytes(deliveryTag), ZooDefs.Ids.OPEN_ACL_UNSAFE,  // here we store the value of delivery message
+			zk.create(newExpNode + AiravataZKUtils.DELIVERY_TAG_POSTFIX, longToBytes(deliveryTag), ZooDefs.Ids.OPEN_ACL_UNSAFE,  // here we store the value of delivery message
 					CreateMode.PERSISTENT);
 		} else {
 			log.error("ExperimentID: " + experimentID + " taskID: " + taskID
 					+ " was running by some Gfac instance,but it failed");
-			log.info("This is an old Job, so copying data from old experiment location");
-			zk.create(newExpNode,
-					zk.getData(experimentEntry, false, exists1),
-					ZooDefs.Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT);
-
-			List<String> children = zk.getChildren(experimentEntry,
-					false);
-			for (String childNode1 : children) {
-				String level1 = experimentEntry + File.separator
-						+ childNode1;
-				Stat exists2 = zk.exists(level1, false); // no need to check exists
-				String newLeve1 = newExpNode + File.separator + childNode1;
-				log.info("Creating new znode: " + newLeve1); // these has to be info logs
-				zk.create(newLeve1, zk.getData(level1, false, exists2),
+			if(newExpNode.equals(experimentEntry)){
+				log.info("Re-launch experiment came to the same GFac instance");
+			}else {
+				log.info("Re-launch experiment came to a new GFac instance so we are moving data to new gfac node");
+				zk.create(newExpNode,
+						zk.getData(experimentEntry, false, exists1),
 						ZooDefs.Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT);
-				for (String childNode2 : zk.getChildren(level1, false)) {
-					String level2 = level1 + File.separator + childNode2;
-					Stat exists3 = zk.exists(level2, false); // no need to check exists
-					String newLeve2 = newLeve1 + File.separator
-							+ childNode2;
-					log.info("Creating new znode: " + newLeve2);
-					zk.create(newLeve2, zk.getData(level2, false, exists3),
-							ZooDefs.Ids.OPEN_ACL_UNSAFE,
-							CreateMode.PERSISTENT);
+
+				List<String> children = zk.getChildren(experimentEntry,
+						false);
+				for (String childNode1 : children) {
+					String level1 = experimentEntry + File.separator
+							+ childNode1;
+					Stat exists2 = zk.exists(level1, false); // no need to check exists
+					String newLeve1 = newExpNode + File.separator + childNode1;
+					log.info("Creating new znode: " + newLeve1); // these has to be info logs
+					zk.create(newLeve1, zk.getData(level1, false, exists2),
+							ZooDefs.Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT);
+					for (String childNode2 : zk.getChildren(level1, false)) {
+						String level2 = level1 + File.separator + childNode2;
+						Stat exists3 = zk.exists(level2, false); // no need to check exists
+						String newLeve2 = newLeve1 + File.separator
+								+ childNode2;
+						log.info("Creating new znode: " + newLeve2);
+						zk.create(newLeve2, zk.getData(level2, false, exists3),
+								ZooDefs.Ids.OPEN_ACL_UNSAFE,
+								CreateMode.PERSISTENT);
+					}
 				}
+				// After all the files are successfully transfered we delete the
+				// old experiment,otherwise we do
+				// not delete a single file
+				log.info("After a successful copying of experiment data for an old experiment we delete the old data");
+				log.info("Deleting experiment data: " + experimentEntry);
+				ZKUtil.deleteRecursive(zk, experimentEntry);
 			}
-			// After all the files are successfully transfered we delete the
-			// old experiment,otherwise we do
-			// not delete a single file
-			log.info("After a successful copying of experiment data for an old experiment we delete the old data");
-			log.info("Deleting experiment data: " + experimentEntry);
-			ZKUtil.deleteRecursive(zk, experimentEntry);
 		}
 		return true;
 	}
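
The GFacUtils change above handles a re-launched experiment: if the old znode entry already belongs to this GFac instance it is reused, otherwise the experiment's znode tree (two levels of children in the committed code) is copied under the new GFac node and the old entry is removed with ZKUtil.deleteRecursive. A recursive sketch of that copy step, assuming only the standard ZooKeeper client API (illustrative only; the commit unrolls the two levels explicitly and builds paths with File.separator):

import org.apache.zookeeper.CreateMode;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.ZooDefs;
import org.apache.zookeeper.ZooKeeper;

public class ZnodeCopySketch {                                     // hypothetical, for illustration
    static void copyTree(ZooKeeper zk, String from, String to)
            throws KeeperException, InterruptedException {
        // copy this node's data, then recurse into children instead of nesting two loops
        zk.create(to, zk.getData(from, false, null),
                ZooDefs.Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT);
        for (String child : zk.getChildren(from, false)) {
            copyTree(zk, from + "/" + child, to + "/" + child);
        }
    }
}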

http://git-wip-us.apache.org/repos/asf/airavata/blob/6409f076/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/provider/impl/SSHProvider.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/provider/impl/SSHProvider.java b/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/provider/impl/SSHProvider.java
index 3e0a1d9..3095831 100644
--- a/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/provider/impl/SSHProvider.java
+++ b/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/provider/impl/SSHProvider.java
@@ -21,9 +21,9 @@
 
 package org.apache.airavata.gfac.ssh.provider.impl;
 
+import org.airavata.appcatalog.cpi.AppCatalog;
 import org.airavata.appcatalog.cpi.AppCatalogException;
 import org.apache.airavata.common.exception.ApplicationSettingsException;
-import org.apache.airavata.common.utils.ServerSettings;
 import org.apache.airavata.gfac.Constants;
 import org.apache.airavata.gfac.ExecutionMode;
 import org.apache.airavata.gfac.GFacException;
@@ -33,7 +33,7 @@ import org.apache.airavata.gfac.core.cpi.BetterGfacImpl;
 import org.apache.airavata.gfac.core.handler.GFacHandlerException;
 import org.apache.airavata.gfac.core.handler.ThreadedHandler;
 import org.apache.airavata.gfac.core.notification.events.StartExecutionEvent;
-import org.apache.airavata.gfac.core.provider.AbstractProvider;
+import org.apache.airavata.gfac.core.provider.AbstractRecoverableProvider;
 import org.apache.airavata.gfac.core.provider.GFacProviderException;
 import org.apache.airavata.gfac.core.utils.GFacUtils;
 import org.apache.airavata.gfac.monitor.email.EmailBasedMonitor;
@@ -49,16 +49,13 @@ import org.apache.airavata.gsi.ssh.impl.StandardOutReader;
 import org.apache.airavata.model.appcatalog.appdeployment.SetEnvPaths;
 import org.apache.airavata.model.appcatalog.appinterface.DataType;
 import org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType;
-import org.apache.airavata.model.appcatalog.computeresource.EmailMonitorProperty;
-import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol;
-import org.apache.airavata.model.appcatalog.computeresource.MonitorMode;
-import org.apache.airavata.model.appcatalog.computeresource.ResourceJobManager;
-import org.apache.airavata.model.appcatalog.computeresource.ResourceJobManagerType;
-import org.apache.airavata.model.appcatalog.computeresource.SSHJobSubmission;
+import org.apache.airavata.model.appcatalog.computeresource.*;
 import org.apache.airavata.model.workspace.experiment.CorrectiveAction;
 import org.apache.airavata.model.workspace.experiment.ErrorCategory;
 import org.apache.airavata.model.workspace.experiment.JobDetails;
 import org.apache.airavata.model.workspace.experiment.JobState;
+import org.apache.xmlbeans.XmlException;
+import org.apache.zookeeper.KeeperException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import sun.reflect.generics.reflectiveObjects.NotImplementedException;
@@ -69,7 +66,7 @@ import java.util.*;
 /**
  * Execute application using remote SSH
  */
-public class SSHProvider extends AbstractProvider {
+public class SSHProvider extends AbstractRecoverableProvider {
     private static final Logger log = LoggerFactory.getLogger(SSHProvider.class);
     private Cluster cluster;
     private String jobID = null;
@@ -142,6 +139,7 @@ public class SSHProvider extends AbstractProvider {
             }
         } else {
             try {
+                StringBuffer data = new StringBuffer();
                 jobExecutionContext.getNotifier().publish(new StartExecutionEvent());
                 JobDetails jobDetails = new JobDetails();
                 String hostAddress = jobExecutionContext.getHostName();
@@ -173,21 +171,27 @@ public class SSHProvider extends AbstractProvider {
                         jobDetails.setJobID(jobID);
                         GFacUtils.saveJobStatus(jobExecutionContext, jobDetails, JobState.SUBMITTED);
                     }
+                    data.append("jobDesc=").append(jobDescriptor.toXML());
+                    data.append(",jobId=").append(jobDetails.getJobID());
                     delegateToMonitorHandlers(jobExecutionContext);
                 } catch (SSHApiException e) {
                     String error = "Error submitting the job to host " + jobExecutionContext.getHostName() + " message: " + e.getMessage();
                     log.error(error);
                     jobDetails.setJobID("none");
                     GFacUtils.saveJobStatus(jobExecutionContext, jobDetails, JobState.FAILED);
-                    GFacUtils.saveErrorDetails(jobExecutionContext,  error, CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
+                    GFacUtils.saveErrorDetails(jobExecutionContext, error, CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
                     throw new GFacProviderException(error, e);
                 } catch (Exception e) {
                     String error = "Error submitting the job to host " + jobExecutionContext.getHostName() + " message: " + e.getMessage();
                     log.error(error);
                     jobDetails.setJobID("none");
                     GFacUtils.saveJobStatus(jobExecutionContext, jobDetails, JobState.FAILED);
-                    GFacUtils.saveErrorDetails(jobExecutionContext,  error, CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
+                    GFacUtils.saveErrorDetails(jobExecutionContext, error, CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
                     throw new GFacProviderException(error, e);
+                } finally {
+                    log.info("Saving data for future recovery: ");
+                    log.info(data.toString());
+                    GFacUtils.savePluginData(jobExecutionContext, data, this.getClass().getName());
                 }
             } catch (GFacException e) {
                 throw new GFacProviderException(e.getMessage(), e);
@@ -233,7 +237,7 @@ public class SSHProvider extends AbstractProvider {
                 GFacUtils.saveJobStatus(jobExecutionContext, jobDetails, JobState.FAILED);
                 StringWriter errors = new StringWriter();
                 e.printStackTrace(new PrintWriter(errors));
-                GFacUtils.saveErrorDetails(jobExecutionContext,  errors.toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
+                GFacUtils.saveErrorDetails(jobExecutionContext, errors.toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
                 throw new GFacProviderException(error, e);
             } catch (Exception e) {
                 String error = "Error submitting the job to host " + jobExecutionContext.getHostName() + " message: " + e.getMessage();
@@ -242,7 +246,7 @@ public class SSHProvider extends AbstractProvider {
                 GFacUtils.saveJobStatus(jobExecutionContext, jobDetails, JobState.FAILED);
                 StringWriter errors = new StringWriter();
                 e.printStackTrace(new PrintWriter(errors));
-                GFacUtils.saveErrorDetails(jobExecutionContext,  errors.toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
+                GFacUtils.saveErrorDetails(jobExecutionContext, errors.toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
                 throw new GFacProviderException(error, e);
             }
             // we know this host is type GsiSSHHostType
@@ -376,7 +380,7 @@ public class SSHProvider extends AbstractProvider {
     }
 
     public void delegateToMonitorHandlers(JobExecutionContext jobExecutionContext) throws GFacHandlerException, AppCatalogException {
-        if (jobExecutionContext.getPreferredJobSubmissionProtocol()== JobSubmissionProtocol.SSH) {
+        if (jobExecutionContext.getPreferredJobSubmissionProtocol() == JobSubmissionProtocol.SSH) {
             String jobSubmissionInterfaceId = jobExecutionContext.getPreferredJobSubmissionInterface().getJobSubmissionInterfaceId();
             SSHJobSubmission sshJobSubmission = jobExecutionContext.getAppCatalog().getComputeResource().getSSHJobSubmission(jobSubmissionInterfaceId);
             MonitorMode monitorMode = sshJobSubmission.getMonitorMode();
@@ -408,7 +412,74 @@ public class SSHProvider extends AbstractProvider {
             log.error("No Daemon handler is configured in gfac-config.xml, either pull or push, so monitoring will not invoked" +
                     ", execution is configured as asynchronous, so Outhandler will not be invoked");
         }
-
     }
 
+
+    public void recover(JobExecutionContext jobExecutionContext) throws GFacProviderException, GFacException {
+        // have to implement the logic to recover a gfac failure
+        initialize(jobExecutionContext);
+        if(hpcType) {
+            log.info("Invoking Recovering for the Experiment: " + jobExecutionContext.getExperimentID());
+            String hostName = jobExecutionContext.getHostName();
+            String jobId = "";
+            String jobDesc = "";
+            String jobName = "";
+            try {
+                String pluginData = GFacUtils.getPluginData(jobExecutionContext, this.getClass().getName());
+                String[] split = pluginData.split(",");
+                if (split.length < 2) {
+                    this.execute(jobExecutionContext);
+                    return;
+                }
+                jobDesc = split[0].substring(8);
+                jobId = split[1].substring(6);
+                try {
+                    JobDescriptor jobDescriptor = JobDescriptor.fromXML(jobDesc);
+                    jobName = jobDescriptor.getJobName();
+                } catch (XmlException e) {
+                    log.error(e.getMessage(), e);
+                    log.error("Cannot parse plugin data stored, but trying to recover");
+
+                }
+                log.info("Following data have recovered: ");
+                log.info("Job Description: " + jobDesc);
+                log.info("Job Id: " + jobId);
+                if (jobName.isEmpty() || jobId.isEmpty() || "none".equals(jobId) ||
+                        "".equals(jobId)) {
+                    log.info("Cannot recover data so submitting the job again !!!");
+                    this.execute(jobExecutionContext);
+                    return;
+                }
+            } catch (ApplicationSettingsException e) {
+                log.error("Error while  recovering provider", e);
+            } catch (KeeperException e) {
+                log.error("Error while  recovering provider", e);
+            } catch (InterruptedException e) {
+                log.error("Error while  recovering provider", e);
+            }
+            try {
+                // Now we are we have enough data to recover
+                JobDetails jobDetails = new JobDetails();
+                jobDetails.setJobDescription(jobDesc);
+                jobDetails.setJobID(jobId);
+                jobDetails.setJobName(jobName);
+                jobExecutionContext.setJobDetails(jobDetails);
+                if (jobExecutionContext.getSecurityContext(hostName) == null) {
+                    try {
+                        GFACSSHUtils.addSecurityContext(jobExecutionContext);
+                    } catch (ApplicationSettingsException e) {
+                        log.error(e.getMessage());
+                        throw new GFacHandlerException("Error while creating SSHSecurityContext", e, e.getLocalizedMessage());
+                    }
+                }
+                delegateToMonitorHandlers(jobExecutionContext);
+            } catch (Exception e) {
+                log.error("Error while recover the job", e);
+                throw new GFacProviderException("Error delegating already ran job to Monitoring", e);
+            }
+        }else{
+            log.info("We do not handle non hpc recovery so we simply run the Job directly");
+            this.execute(jobExecutionContext);
+        }
+    }
 }

