Subject: svn commit: r1139550 [3/6] - in /incubator/airavata/core/trunk/gfac: ./ .settings/ src/main/java/org/apache/airavata/core/gfac/context/ src/main/java/org/apache/airavata/core/gfac/context/impl/ src/main/java/org/apache/airavata/core/gfac/context/impl/u...
Date: Sat, 25 Jun 2011 15:17:38 -0000
To: airavata-commits@incubator.apache.org
From: smarru@apache.org
Message-Id: <20110625151739.324C323889ED@eris.apache.org>

Modified: incubator/airavata/core/trunk/gfac/src/main/java/org/apache/airavata/core/gfac/provider/GramProvider.java
URL: http://svn.apache.org/viewvc/incubator/airavata/core/trunk/gfac/src/main/java/org/apache/airavata/core/gfac/provider/GramProvider.java?rev=1139550&r1=1139549&r2=1139550&view=diff
==============================================================================
--- incubator/airavata/core/trunk/gfac/src/main/java/org/apache/airavata/core/gfac/provider/GramProvider.java (original)
+++ incubator/airavata/core/trunk/gfac/src/main/java/org/apache/airavata/core/gfac/provider/GramProvider.java Sat Jun 25 15:17:35 2011
@@ -1,3 +1,24 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.
See the License for the + * specific language governing permissions and limitations + * under the License. + * + */ + package org.apache.airavata.core.gfac.provider; import java.io.File; @@ -11,17 +32,17 @@ import org.apache.airavata.core.gfac.con import org.apache.airavata.core.gfac.context.InvocationContext; import org.apache.airavata.core.gfac.context.impl.GSISecurityContext; import org.apache.airavata.core.gfac.exception.GfacException; -import org.apache.airavata.core.gfac.exception.JobSubmissionFault; import org.apache.airavata.core.gfac.exception.GfacException.FaultCode; +import org.apache.airavata.core.gfac.exception.JobSubmissionFault; import org.apache.airavata.core.gfac.external.GridFtp; import org.apache.airavata.core.gfac.model.ExecutionModel; import org.apache.airavata.core.gfac.notification.NotificationService; import org.apache.airavata.core.gfac.provider.utils.GramRSLGenerator; import org.apache.airavata.core.gfac.provider.utils.JobSubmissionListener; import org.apache.airavata.core.gfac.utils.ErrorCodes; +import org.apache.airavata.core.gfac.utils.GFacOptions.CurrentProviders; import org.apache.airavata.core.gfac.utils.GfacUtils; import org.apache.airavata.core.gfac.utils.OutputUtils; -import org.apache.airavata.core.gfac.utils.GFacOptions.CurrentProviders; import org.globus.gram.GramAttributes; import org.globus.gram.GramException; import org.globus.gram.GramJob; @@ -33,243 +54,261 @@ import edu.indiana.extreme.lead.workflow public class GramProvider extends AbstractProvider { - public static final String MYPROXY_SECURITY_CONTEXT = "myproxy"; + public static final String MYPROXY_SECURITY_CONTEXT = "myproxy"; - public void initialize(InvocationContext invocationContext) throws GfacException { - ExecutionContext appExecContext = invocationContext.getExecutionContext(); - ExecutionModel model = appExecContext.getExecutionModel(); - - GridFtp ftp = new GridFtp(); - - try { - GSSCredential gssCred = ((GSISecurityContext) invocationContext.getSecurityContext(MYPROXY_SECURITY_CONTEXT)).getGssCredentails(); - - // get Hostname - String hostgridFTP = null; - - if (model.getHostDesc().getHostConfiguration().getGridFTPArray() != null && model.getHostDesc().getHostConfiguration().getGridFTPArray().length > 0) { - hostgridFTP = model.getHostDesc().getHostConfiguration().getGridFTPArray(0).getEndPointReference(); - } else { - hostgridFTP = model.getHost(); - } - - URI tmpdirURI = GfacUtils.createGsiftpURI(hostgridFTP, model.getTmpDir()); - URI workingDirURI = GfacUtils.createGsiftpURI(hostgridFTP, model.getWorkingDir()); - URI inputURI = GfacUtils.createGsiftpURI(hostgridFTP, model.getInputDataDir()); - URI outputURI = GfacUtils.createGsiftpURI(hostgridFTP, model.getOutputDataDir()); - - log.info("Host FTP = " + hostgridFTP); - log.info("temp directory = " + tmpdirURI); - log.info("Working directory = " + workingDirURI); - log.info("Input directory = " + inputURI); - log.info("Output directory = " + outputURI); - - ftp.makeDir(tmpdirURI, gssCred); - ftp.makeDir(workingDirURI, gssCred); - ftp.makeDir(inputURI, gssCred); - ftp.makeDir(outputURI, gssCred); - - } catch (Exception e) { - e.printStackTrace(); - } - } - - public void execute(InvocationContext invocationContext) throws GfacException { - ExecutionContext context = invocationContext.getExecutionContext(); - - String contact = null; - log.info("Searching for Gate Keeper"); - GlobusGatekeeperType gatekeeper = context.getExecutionModel().getGatekeeper(); - if (gatekeeper == null) { - contact = context.getExecutionModel().getHost(); 
- } else { - contact = gatekeeper.getEndPointReference(); - } - log.info("Using Globus GateKeeper " + contact); - GramJob job = null; - boolean jobSucsseful = false; - - String rsl = ""; - int errCode = 0; - - try { - GSSCredential gssCred = ((GSISecurityContext) context.getSecurityContext()).getGssCredentails(); - - log.info("Host desc = " + context.getExecutionModel().getHostDesc().xmlText()); - - GramAttributes jobAttr = GramRSLGenerator.configureRemoteJob(context); - rsl = jobAttr.toRSL(); - job = new GramJob(rsl); - job.setCredentials(gssCred); - - log.info("RSL = " + rsl); - - NotificationService notifier = context.getNotificationService(); - DurationObj compObj = notifier.computationStarted(); - StringBuffer buf = new StringBuffer(); - - JobSubmissionListener listener = new JobSubmissionListener(job, context); - job.addListener(listener); - log.info("Request to contact:" + contact); - // The first boolean is to specify the job is a batch job - use true - // for interactive and false for batch. - // the second boolean is to specify to use the full proxy and not - // delegate a limited proxy. - job.request(contact, false, false); - - log.info("JobID = " + job.getIDAsString()); - - // Gram.request(contact, job, false, false); - - buf.append("Finished launching job, Host = ").append(context.getExecutionModel().getHost()).append(" RSL = ").append(job.getRSL()).append("working directory =").append(context.getExecutionModel().getWorkingDir()).append("tempDirectory =").append(context.getExecutionModel().getTmpDir()) - .append("Globus GateKeeper cantact = ").append(contact); - context.getNotificationService().info(buf.toString()); - String gramJobid = job.getIDAsString(); - context.getNotificationService().info("JobID=" + gramJobid); - log.info(buf.toString()); - // Send Audit Notifications - notifier.appAudit(invocationContext.getServiceName(), new URI(job.getIDAsString()), contact, null, null, gssCred.getName().toString(), null, job.getRSL()); - - listener.waitFor(); - job.removeListener(listener); - - int jobStatus = listener.getStatus(); - if (jobStatus == GramJob.STATUS_FAILED) { - errCode = listener.getError(); - // Adding retry for error code to properties files as - // gfac.retryonJobErrorCodes with comma separated - if (context.getServiceContext().getGlobalConfiguration().getRetryonErrorCodes().contains(Integer.toString(errCode))) { - try { - log.info("Job Failed with Error code " + errCode + " and job id: " + gramJobid); - log.info("Retry job sumttion one more time for error code" + errCode); - job = new GramJob(rsl); - job.setCredentials(gssCred); - listener = new JobSubmissionListener(job, context); - job.addListener(listener); - job.request(contact, false, false); - String newGramJobid = job.getIDAsString(); - String jobStatusMessage = GfacUtils.formatJobStatus(newGramJobid, "RETRY"); - context.getNotificationService().info(jobStatusMessage); - context.getNotificationService().info("JobID=" + newGramJobid); - notifier.appAudit(context.getServiceContext().getService().getService().getServiceName().getStringValue(), new URI(job.getIDAsString()), contact, null, null, gssCred.getName().toString(), null, job.getRSL()); - listener.waitFor(); - job.removeListener(listener); - int jobStatus1 = listener.getStatus(); - if (jobStatus1 == GramJob.STATUS_FAILED) { - int errCode1 = listener.getError(); - String errorMsg = "Job " + job.getID() + " on host " + context.getExecutionModel().getHost() + " Error Code = " + errCode1; - String localHost = 
context.getServiceContext().getGlobalConfiguration().getLocalHost(); - throw new JobSubmissionFault(new Exception(errorMsg), localHost, "", "", CurrentProviders.Gram); - } - } catch (Exception e) { - String localHost = context.getServiceContext().getGlobalConfiguration().getLocalHost(); - throw new JobSubmissionFault(e, localHost, "", "", CurrentProviders.Gram); - } - } else { - String errorMsg = "Job " + job.getID() + " on host " + context.getExecutionModel().getHost() + " Error Code = " + errCode; - String localHost = context.getServiceContext().getGlobalConfiguration().getLocalHost(); - GfacException error = new JobSubmissionFault(new Exception(errorMsg), localHost, contact, rsl, CurrentProviders.Gram); - if (errCode == 8) { - error.setFaultCode(ErrorCodes.JOB_CANCELED); - } else { - error.setFaultCode(ErrorCodes.JOB_FAILED); - } - // error.addProperty(ErrorCodes.JOB_TYPE, - // ErrorCodes.JobType.Gram.toString()); - // error.addProperty(ErrorCodes.CONTACT, contact); - throw error; - } - } - notifier.computationFinished(compObj); - - /* - * Stdout and Stderror - */ - GridFtp ftp = new GridFtp(); - - // get Hostname - String hostgridFTP = null; - - if (invocationContext.getExecutionContext().getExecutionModel().getHostDesc().getHostConfiguration().getGridFTPArray() != null && invocationContext.getExecutionContext().getExecutionModel().getHostDesc().getHostConfiguration().getGridFTPArray().length > 0) { - hostgridFTP = invocationContext.getExecutionContext().getExecutionModel().getHostDesc().getHostConfiguration().getGridFTPArray(0).getEndPointReference(); - } else { - hostgridFTP = invocationContext.getExecutionContext().getExecutionModel().getHost(); - } - - URI stdoutURI = GfacUtils.createGsiftpURI(hostgridFTP, invocationContext.getExecutionContext().getExecutionModel().getStdOut()); - URI stderrURI = GfacUtils.createGsiftpURI(hostgridFTP, invocationContext.getExecutionContext().getExecutionModel().getStderr()); - - System.out.println(stdoutURI); - System.out.println(stderrURI); - - File logDir = new File("./service_logs"); - if (!logDir.exists()) { - logDir.mkdir(); - } - - // Get the Stdouts and StdErrs - QName x = QName.valueOf(invocationContext.getServiceName()); - String timeStampedServiceName = GfacUtils.createServiceDirName(x); - File localStdOutFile = new File(logDir, timeStampedServiceName + ".stdout"); - File localStdErrFile = new File(logDir, timeStampedServiceName + ".stderr"); - - String stdout = ftp.readRemoteFile(stdoutURI, gssCred, localStdOutFile); - String stderr = ftp.readRemoteFile(stderrURI, gssCred, localStdErrFile); - - //set to context - OutputUtils.fillOutputFromStdout(invocationContext.getMessageContext("output"), stdout, stderr); - - - jobSucsseful = true; - } catch (GramException e) { - String localHost = "xxxx"; - GfacException error = new JobSubmissionFault(e, localHost, contact, rsl, CurrentProviders.Gram); - if (errCode == 8) { - error.setFaultCode(ErrorCodes.JOB_CANCELED); - } else { - error.setFaultCode(ErrorCodes.JOB_FAILED); - } - // error.addProperty(ErrorCodes.JOB_TYPE, - // ErrorCodes.JobType.Gram.toString()); - // error.addProperty(ErrorCodes.CONTACT, contact); - throw error; - } catch (GSSException e) { - String localHost = context.getServiceContext().getGlobalConfiguration().getLocalHost(); - throw new JobSubmissionFault(e, localHost, contact, rsl, CurrentProviders.Gram); - } catch (URISyntaxException e) { - throw new GfacException(e, FaultCode.ErrorAtDependentService); - } catch (InterruptedException e) { - throw new GfacException(e, 
FaultCode.ErrorAtDependentService); - } finally { - if (job != null && !jobSucsseful) { - try { - job.cancel(); - } catch (Exception e) { - } - } - } - - } - - public void dispose(InvocationContext invocationContext) throws GfacException { - - } - - public void abort(InvocationContext invocationContext) throws GfacException { - try { - ExecutionContext context = invocationContext.getExecutionContext(); - GramJob job = new GramJob(""); - job.setID(context.getExecutionModel().getJobID()); - job.setCredentials(((GSISecurityContext) context.getSecurityContext()).getGssCredentails()); - job.cancel(); - } catch (MalformedURLException e) { - throw new GfacException(e, FaultCode.ErrorAtDependentService); - } catch (GramException e) { - throw new GfacException(e, FaultCode.ErrorAtDependentService); - } catch (GSSException e) { - throw new GfacException(e, FaultCode.ErrorAtDependentService); - } + public void initialize(InvocationContext invocationContext) throws GfacException { + ExecutionContext appExecContext = invocationContext.getExecutionContext(); + ExecutionModel model = appExecContext.getExecutionModel(); + + GridFtp ftp = new GridFtp(); + + try { + GSSCredential gssCred = ((GSISecurityContext) invocationContext + .getSecurityContext(MYPROXY_SECURITY_CONTEXT)).getGssCredentails(); + + // get Hostname + String hostgridFTP = null; + + if (model.getHostDesc().getHostConfiguration().getGridFTPArray() != null + && model.getHostDesc().getHostConfiguration().getGridFTPArray().length > 0) { + hostgridFTP = model.getHostDesc().getHostConfiguration().getGridFTPArray(0).getEndPointReference(); + } else { + hostgridFTP = model.getHost(); + } + + URI tmpdirURI = GfacUtils.createGsiftpURI(hostgridFTP, model.getTmpDir()); + URI workingDirURI = GfacUtils.createGsiftpURI(hostgridFTP, model.getWorkingDir()); + URI inputURI = GfacUtils.createGsiftpURI(hostgridFTP, model.getInputDataDir()); + URI outputURI = GfacUtils.createGsiftpURI(hostgridFTP, model.getOutputDataDir()); + + log.info("Host FTP = " + hostgridFTP); + log.info("temp directory = " + tmpdirURI); + log.info("Working directory = " + workingDirURI); + log.info("Input directory = " + inputURI); + log.info("Output directory = " + outputURI); + + ftp.makeDir(tmpdirURI, gssCred); + ftp.makeDir(workingDirURI, gssCred); + ftp.makeDir(inputURI, gssCred); + ftp.makeDir(outputURI, gssCred); + + } catch (Exception e) { + e.printStackTrace(); + } + } + + public void execute(InvocationContext invocationContext) throws GfacException { + ExecutionContext context = invocationContext.getExecutionContext(); + + String contact = null; + log.info("Searching for Gate Keeper"); + GlobusGatekeeperType gatekeeper = context.getExecutionModel().getGatekeeper(); + if (gatekeeper == null) { + contact = context.getExecutionModel().getHost(); + } else { + contact = gatekeeper.getEndPointReference(); + } + log.info("Using Globus GateKeeper " + contact); + GramJob job = null; + boolean jobSucsseful = false; + + String rsl = ""; + int errCode = 0; + + try { + GSSCredential gssCred = ((GSISecurityContext) context.getSecurityContext()).getGssCredentails(); + + log.info("Host desc = " + context.getExecutionModel().getHostDesc().xmlText()); + + GramAttributes jobAttr = GramRSLGenerator.configureRemoteJob(context); + rsl = jobAttr.toRSL(); + job = new GramJob(rsl); + job.setCredentials(gssCred); + + log.info("RSL = " + rsl); + + NotificationService notifier = context.getNotificationService(); + DurationObj compObj = notifier.computationStarted(); + StringBuffer buf = new 
StringBuffer(); + + JobSubmissionListener listener = new JobSubmissionListener(job, context); + job.addListener(listener); + log.info("Request to contact:" + contact); + // The first boolean is to specify the job is a batch job - use true + // for interactive and false for batch. + // the second boolean is to specify to use the full proxy and not + // delegate a limited proxy. + job.request(contact, false, false); + + log.info("JobID = " + job.getIDAsString()); + + // Gram.request(contact, job, false, false); + + buf.append("Finished launching job, Host = ").append(context.getExecutionModel().getHost()) + .append(" RSL = ").append(job.getRSL()).append("working directory =") + .append(context.getExecutionModel().getWorkingDir()).append("tempDirectory =") + .append(context.getExecutionModel().getTmpDir()).append("Globus GateKeeper cantact = ") + .append(contact); + context.getNotificationService().info(buf.toString()); + String gramJobid = job.getIDAsString(); + context.getNotificationService().info("JobID=" + gramJobid); + log.info(buf.toString()); + // Send Audit Notifications + notifier.appAudit(invocationContext.getServiceName(), new URI(job.getIDAsString()), contact, null, null, + gssCred.getName().toString(), null, job.getRSL()); + + listener.waitFor(); + job.removeListener(listener); + + int jobStatus = listener.getStatus(); + if (jobStatus == GramJob.STATUS_FAILED) { + errCode = listener.getError(); + // Adding retry for error code to properties files as + // gfac.retryonJobErrorCodes with comma separated + if (context.getServiceContext().getGlobalConfiguration().getRetryonErrorCodes() + .contains(Integer.toString(errCode))) { + try { + log.info("Job Failed with Error code " + errCode + " and job id: " + gramJobid); + log.info("Retry job sumttion one more time for error code" + errCode); + job = new GramJob(rsl); + job.setCredentials(gssCred); + listener = new JobSubmissionListener(job, context); + job.addListener(listener); + job.request(contact, false, false); + String newGramJobid = job.getIDAsString(); + String jobStatusMessage = GfacUtils.formatJobStatus(newGramJobid, "RETRY"); + context.getNotificationService().info(jobStatusMessage); + context.getNotificationService().info("JobID=" + newGramJobid); + notifier.appAudit(context.getServiceContext().getService().getService().getServiceName() + .getStringValue(), new URI(job.getIDAsString()), contact, null, null, gssCred.getName() + .toString(), null, job.getRSL()); + listener.waitFor(); + job.removeListener(listener); + int jobStatus1 = listener.getStatus(); + if (jobStatus1 == GramJob.STATUS_FAILED) { + int errCode1 = listener.getError(); + String errorMsg = "Job " + job.getID() + " on host " + + context.getExecutionModel().getHost() + " Error Code = " + errCode1; + String localHost = context.getServiceContext().getGlobalConfiguration().getLocalHost(); + throw new JobSubmissionFault(new Exception(errorMsg), localHost, "", "", + CurrentProviders.Gram); + } + } catch (Exception e) { + String localHost = context.getServiceContext().getGlobalConfiguration().getLocalHost(); + throw new JobSubmissionFault(e, localHost, "", "", CurrentProviders.Gram); + } + } else { + String errorMsg = "Job " + job.getID() + " on host " + context.getExecutionModel().getHost() + + " Error Code = " + errCode; + String localHost = context.getServiceContext().getGlobalConfiguration().getLocalHost(); + GfacException error = new JobSubmissionFault(new Exception(errorMsg), localHost, contact, rsl, + CurrentProviders.Gram); + if (errCode == 8) { + 
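/*
 * Editor's sketch (not part of this commit): a minimal, self-contained version of the
 * submit-and-wait pattern GramProvider uses above. It relies only on Globus Java CoG
 * GramJob calls that appear in this diff (new GramJob(rsl), setCredentials, addListener,
 * request, getStatus, getError, STATUS_FAILED) plus the standard GramJobListener callback;
 * the class and helper names below are illustrative stand-ins for JobSubmissionListener.
 */
import org.globus.gram.GramJob;
import org.globus.gram.GramJobListener;
import org.ietf.jgss.GSSCredential;

class MinimalGramSubmitSketch {
    static int submitAndWait(String rsl, String contact, GSSCredential cred) throws Exception {
        final Object lock = new Object();
        final int[] status = new int[1];
        GramJob job = new GramJob(rsl);            // RSL string built by GramRSLGenerator
        job.setCredentials(cred);                  // MyProxy-derived GSS credential
        job.addListener(new GramJobListener() {
            public void statusChanged(GramJob j) { // invoked by the CoG kit on each state change
                synchronized (lock) {
                    status[0] = j.getStatus();
                    if (status[0] == GramJob.STATUS_DONE || status[0] == GramJob.STATUS_FAILED) {
                        lock.notifyAll();
                    }
                }
            }
        });
        job.request(contact, false, false);        // same two boolean flags the provider passes
        synchronized (lock) {
            while (status[0] != GramJob.STATUS_DONE && status[0] != GramJob.STATUS_FAILED) {
                lock.wait();                       // block until the listener sees a terminal state
            }
        }
        return status[0] == GramJob.STATUS_FAILED ? job.getError() : 0;
    }
}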
error.setFaultCode(ErrorCodes.JOB_CANCELED); + } else { + error.setFaultCode(ErrorCodes.JOB_FAILED); + } + // error.addProperty(ErrorCodes.JOB_TYPE, + // ErrorCodes.JobType.Gram.toString()); + // error.addProperty(ErrorCodes.CONTACT, contact); + throw error; + } + } + notifier.computationFinished(compObj); + + /* + * Stdout and Stderror + */ + GridFtp ftp = new GridFtp(); + + // get Hostname + String hostgridFTP = null; + + if (invocationContext.getExecutionContext().getExecutionModel().getHostDesc().getHostConfiguration() + .getGridFTPArray() != null + && invocationContext.getExecutionContext().getExecutionModel().getHostDesc().getHostConfiguration() + .getGridFTPArray().length > 0) { + hostgridFTP = invocationContext.getExecutionContext().getExecutionModel().getHostDesc() + .getHostConfiguration().getGridFTPArray(0).getEndPointReference(); + } else { + hostgridFTP = invocationContext.getExecutionContext().getExecutionModel().getHost(); + } + + URI stdoutURI = GfacUtils.createGsiftpURI(hostgridFTP, invocationContext.getExecutionContext() + .getExecutionModel().getStdOut()); + URI stderrURI = GfacUtils.createGsiftpURI(hostgridFTP, invocationContext.getExecutionContext() + .getExecutionModel().getStderr()); + + System.out.println(stdoutURI); + System.out.println(stderrURI); + + File logDir = new File("./service_logs"); + if (!logDir.exists()) { + logDir.mkdir(); + } + + // Get the Stdouts and StdErrs + QName x = QName.valueOf(invocationContext.getServiceName()); + String timeStampedServiceName = GfacUtils.createServiceDirName(x); + File localStdOutFile = new File(logDir, timeStampedServiceName + ".stdout"); + File localStdErrFile = new File(logDir, timeStampedServiceName + ".stderr"); + + String stdout = ftp.readRemoteFile(stdoutURI, gssCred, localStdOutFile); + String stderr = ftp.readRemoteFile(stderrURI, gssCred, localStdErrFile); + + // set to context + OutputUtils.fillOutputFromStdout(invocationContext.getMessageContext("output"), stdout, stderr); + + jobSucsseful = true; + } catch (GramException e) { + String localHost = "xxxx"; + GfacException error = new JobSubmissionFault(e, localHost, contact, rsl, CurrentProviders.Gram); + if (errCode == 8) { + error.setFaultCode(ErrorCodes.JOB_CANCELED); + } else { + error.setFaultCode(ErrorCodes.JOB_FAILED); + } + // error.addProperty(ErrorCodes.JOB_TYPE, + // ErrorCodes.JobType.Gram.toString()); + // error.addProperty(ErrorCodes.CONTACT, contact); + throw error; + } catch (GSSException e) { + String localHost = context.getServiceContext().getGlobalConfiguration().getLocalHost(); + throw new JobSubmissionFault(e, localHost, contact, rsl, CurrentProviders.Gram); + } catch (URISyntaxException e) { + throw new GfacException(e, FaultCode.ErrorAtDependentService); + } catch (InterruptedException e) { + throw new GfacException(e, FaultCode.ErrorAtDependentService); + } finally { + if (job != null && !jobSucsseful) { + try { + job.cancel(); + } catch (Exception e) { + } + } + } + + } + + public void dispose(InvocationContext invocationContext) throws GfacException { + + } + + public void abort(InvocationContext invocationContext) throws GfacException { + try { + ExecutionContext context = invocationContext.getExecutionContext(); + GramJob job = new GramJob(""); + job.setID(context.getExecutionModel().getJobID()); + job.setCredentials(((GSISecurityContext) context.getSecurityContext()).getGssCredentails()); + job.cancel(); + } catch (MalformedURLException e) { + throw new GfacException(e, FaultCode.ErrorAtDependentService); + } catch (GramException e) 
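/*
 * Editor's sketch (not part of this commit): the stdout-staging step above, condensed.
 * GridFtp and GfacUtils are this module's own helpers; their behavior is inferred from
 * the calls shown in this diff, and the wrapper method and variable names here are
 * hypothetical.
 */
import java.io.File;
import java.net.URI;
import javax.xml.namespace.QName;
import org.apache.airavata.core.gfac.external.GridFtp;
import org.apache.airavata.core.gfac.utils.GfacUtils;
import org.ietf.jgss.GSSCredential;

class StdoutStagingSketch {
    static String fetchStdout(String gridFtpHost, String remoteStdOutPath, String serviceName,
                              GSSCredential cred) throws Exception {
        // gsiftp:// URI pointing at the remote stdout file
        URI stdoutURI = GfacUtils.createGsiftpURI(gridFtpHost, remoteStdOutPath);

        // local per-invocation log file, mirroring the provider's ./service_logs layout
        File logDir = new File("./service_logs");
        if (!logDir.exists()) {
            logDir.mkdir();
        }
        String dirName = GfacUtils.createServiceDirName(QName.valueOf(serviceName));
        File localStdOut = new File(logDir, dirName + ".stdout");

        // readRemoteFile is given a local target file and returns the file's contents,
        // exactly as the provider code above uses it
        return new GridFtp().readRemoteFile(stdoutURI, cred, localStdOut);
    }
}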
{ + throw new GfacException(e, FaultCode.ErrorAtDependentService); + } catch (GSSException e) { + throw new GfacException(e, FaultCode.ErrorAtDependentService); + } - } + } } Modified: incubator/airavata/core/trunk/gfac/src/main/java/org/apache/airavata/core/gfac/provider/LocalProvider.java URL: http://svn.apache.org/viewvc/incubator/airavata/core/trunk/gfac/src/main/java/org/apache/airavata/core/gfac/provider/LocalProvider.java?rev=1139550&r1=1139549&r2=1139550&view=diff ============================================================================== --- incubator/airavata/core/trunk/gfac/src/main/java/org/apache/airavata/core/gfac/provider/LocalProvider.java (original) +++ incubator/airavata/core/trunk/gfac/src/main/java/org/apache/airavata/core/gfac/provider/LocalProvider.java Sat Jun 25 15:17:35 2011 @@ -1,3 +1,24 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + * + */ + package org.apache.airavata.core.gfac.provider; import java.io.BufferedReader; @@ -13,218 +34,212 @@ import java.util.Map; import org.apache.airavata.core.gfac.context.ExecutionContext; import org.apache.airavata.core.gfac.context.InvocationContext; import org.apache.airavata.core.gfac.exception.GfacException; -import org.apache.airavata.core.gfac.exception.JobSubmissionFault; import org.apache.airavata.core.gfac.exception.GfacException.FaultCode; +import org.apache.airavata.core.gfac.exception.JobSubmissionFault; import org.apache.airavata.core.gfac.notification.NotificationService; import org.apache.airavata.core.gfac.utils.GFacConstants; +import org.apache.airavata.core.gfac.utils.GFacOptions.CurrentProviders; import org.apache.airavata.core.gfac.utils.GfacUtils; import org.apache.airavata.core.gfac.utils.OutputUtils; -import org.apache.airavata.core.gfac.utils.GFacOptions.CurrentProviders; import edu.indiana.extreme.lead.workflow_tracking.common.DurationObj; public class LocalProvider extends AbstractProvider { - private static final String SPACE = " "; + private static final String SPACE = " "; - - private String buildCommand(List cmdList){ - StringBuffer buff = new StringBuffer(); - for (String string : cmdList) { - buff.append(string); - buff.append(SPACE); - } - return buff.toString(); - } - - public void initialize(InvocationContext invocationContext) throws GfacException { - ExecutionContext context = invocationContext.getExecutionContext(); - - log.info("working diectroy = " + context.getExecutionModel().getWorkingDir()); - log.info("temp directory = " + context.getExecutionModel().getTmpDir()); - new File(context.getExecutionModel().getWorkingDir()).mkdir(); - new File(context.getExecutionModel().getTmpDir()).mkdir(); - new File(context.getExecutionModel().getInputDataDir()).mkdir(); - new 
File(context.getExecutionModel().getOutputDataDir()).mkdir(); - } - - public void execute(InvocationContext invocationContext) throws GfacException { - ExecutionContext context = invocationContext.getExecutionContext(); - - List cmdList = new ArrayList(); - - try { - /* - * Notifier - */ - NotificationService notifier = context.getNotificationService(); - - /* - * Builder Command - */ - cmdList.add(context.getExecutionModel().getExecutable()); - cmdList.addAll(context.getExecutionModel().getInputParameters()); - - //create process builder from command - ProcessBuilder builder = new ProcessBuilder(cmdList); - - - // get the env of the host and the application - Map nv = context.getExecutionModel().getEnv(); - builder.environment().putAll(nv); - - // extra env's - builder.environment().put(GFacConstants.INPUT_DATA_DIR, context.getExecutionModel().getInputDataDir()); - builder.environment().put(GFacConstants.OUTPUT_DATA_DIR, context.getExecutionModel().getOutputDataDir()); - - //working directory - builder.directory(new File(context.getExecutionModel().getWorkingDir())); - - - //log info - log.info("Command = " + buildCommand(cmdList)); - log.info("Working dir = " + builder.directory()); - for (String key : builder.environment().keySet()) { - log.info("Env[" + key + "] = " + builder.environment().get(key)); - } - - //notify start - DurationObj compObj = notifier.computationStarted(); - - //running cmd - Process process = builder.start(); - - final BufferedReader in = new BufferedReader(new InputStreamReader(process.getInputStream())); - final BufferedReader err = new BufferedReader(new InputStreamReader(process.getErrorStream())); - final BufferedWriter stdoutWtiter = new BufferedWriter(new FileWriter(context.getExecutionModel().getStdOut())); - final BufferedWriter stdErrWtiter = new BufferedWriter(new FileWriter(context.getExecutionModel().getStderr())); - - Thread t1 = new Thread(new Runnable() { - - public void run() { - try { - String line=null; - while ( (line = in.readLine()) != null){ - log.debug(line); - stdoutWtiter.write(line); - stdoutWtiter.newLine(); - } - } catch (Exception e) { - e.printStackTrace(); - } finally { - if (in != null){ - try { - in.close(); - } catch (Exception e) { - e.printStackTrace(); - } - } - if (stdoutWtiter != null) { - try { - stdoutWtiter.close(); - } catch (Exception e) { - e.printStackTrace(); - } - } - } - } - - }); - - Thread t2 = new Thread(new Runnable() { - - public void run() { - try { - String line=null; - while ((line = err.readLine()) != null){ - log.debug(line); - stdErrWtiter.write(line); - stdErrWtiter.newLine(); - } - } catch (Exception e) { - e.printStackTrace(); - } finally { - if (err != null){ - try { - err.close(); - } catch (Exception e) { - e.printStackTrace(); - } - } - if (stdErrWtiter != null) { - try { - stdErrWtiter.close(); - } catch (Exception e) { - e.printStackTrace(); - } - } - } - - } - - }); - - //start output threads - t1.setDaemon(true); - t2.setDaemon(true); - t1.start(); - t2.start(); - - - // wait for the process (application) to finish executing - int returnValue = process.waitFor(); - - // notify end - notifier.computationFinished(compObj); - - // make sure other two threads are done - t1.join(); - t2.join(); - - /* - * check return value. usually not very helpful to draw conclusions - * based on return values so don't bother. just provide warning in - * the log messages - */ - if (returnValue != 0) { - log.error("Process finished with non zero return value. 
Process may have failed"); - } else { - log.info("Process finished with return value of zero."); - } - - StringBuffer buf = new StringBuffer(); - buf.append("Executed ") - .append(buildCommand(cmdList)) - .append(" on the localHost, working directory =") - .append(context.getExecutionModel().getWorkingDir()) - .append("tempDirectory =") - .append(context.getExecutionModel().getTmpDir()) - .append("With the status") - .append(String.valueOf(returnValue)); - context.getNotificationService().info(buf.toString()); - - log.info(buf.toString()); - - context.getExecutionModel().setStdoutStr(GfacUtils.readFile(context.getExecutionModel().getStdOut())); - context.getExecutionModel().setStderrStr(GfacUtils.readFile(context.getExecutionModel().getStderr())); - - //set to context - OutputUtils.fillOutputFromStdout(invocationContext.getMessageContext("output"), context.getExecutionModel().getStdoutStr(), context.getExecutionModel().getStderrStr()); - - } catch (IOException e) { - throw new JobSubmissionFault(e, "", "", buildCommand(cmdList), CurrentProviders.Local); - } catch (InterruptedException e) { - throw new GfacException(e, FaultCode.LocalError); - } - - } + private String buildCommand(List cmdList) { + StringBuffer buff = new StringBuffer(); + for (String string : cmdList) { + buff.append(string); + buff.append(SPACE); + } + return buff.toString(); + } + + public void initialize(InvocationContext invocationContext) throws GfacException { + ExecutionContext context = invocationContext.getExecutionContext(); + + log.info("working diectroy = " + context.getExecutionModel().getWorkingDir()); + log.info("temp directory = " + context.getExecutionModel().getTmpDir()); + new File(context.getExecutionModel().getWorkingDir()).mkdir(); + new File(context.getExecutionModel().getTmpDir()).mkdir(); + new File(context.getExecutionModel().getInputDataDir()).mkdir(); + new File(context.getExecutionModel().getOutputDataDir()).mkdir(); + } + + public void execute(InvocationContext invocationContext) throws GfacException { + ExecutionContext context = invocationContext.getExecutionContext(); + + List cmdList = new ArrayList(); + + try { + /* + * Notifier + */ + NotificationService notifier = context.getNotificationService(); + + /* + * Builder Command + */ + cmdList.add(context.getExecutionModel().getExecutable()); + cmdList.addAll(context.getExecutionModel().getInputParameters()); + + // create process builder from command + ProcessBuilder builder = new ProcessBuilder(cmdList); + + // get the env of the host and the application + Map nv = context.getExecutionModel().getEnv(); + builder.environment().putAll(nv); + + // extra env's + builder.environment().put(GFacConstants.INPUT_DATA_DIR, context.getExecutionModel().getInputDataDir()); + builder.environment().put(GFacConstants.OUTPUT_DATA_DIR, context.getExecutionModel().getOutputDataDir()); + + // working directory + builder.directory(new File(context.getExecutionModel().getWorkingDir())); + + // log info + log.info("Command = " + buildCommand(cmdList)); + log.info("Working dir = " + builder.directory()); + for (String key : builder.environment().keySet()) { + log.info("Env[" + key + "] = " + builder.environment().get(key)); + } + + // notify start + DurationObj compObj = notifier.computationStarted(); + + // running cmd + Process process = builder.start(); + + final BufferedReader in = new BufferedReader(new InputStreamReader(process.getInputStream())); + final BufferedReader err = new BufferedReader(new InputStreamReader(process.getErrorStream())); + final 
BufferedWriter stdoutWtiter = new BufferedWriter(new FileWriter(context.getExecutionModel() + .getStdOut())); + final BufferedWriter stdErrWtiter = new BufferedWriter(new FileWriter(context.getExecutionModel() + .getStderr())); + + Thread t1 = new Thread(new Runnable() { + + public void run() { + try { + String line = null; + while ((line = in.readLine()) != null) { + log.debug(line); + stdoutWtiter.write(line); + stdoutWtiter.newLine(); + } + } catch (Exception e) { + e.printStackTrace(); + } finally { + if (in != null) { + try { + in.close(); + } catch (Exception e) { + e.printStackTrace(); + } + } + if (stdoutWtiter != null) { + try { + stdoutWtiter.close(); + } catch (Exception e) { + e.printStackTrace(); + } + } + } + } + + }); + + Thread t2 = new Thread(new Runnable() { + + public void run() { + try { + String line = null; + while ((line = err.readLine()) != null) { + log.debug(line); + stdErrWtiter.write(line); + stdErrWtiter.newLine(); + } + } catch (Exception e) { + e.printStackTrace(); + } finally { + if (err != null) { + try { + err.close(); + } catch (Exception e) { + e.printStackTrace(); + } + } + if (stdErrWtiter != null) { + try { + stdErrWtiter.close(); + } catch (Exception e) { + e.printStackTrace(); + } + } + } + + } + + }); + + // start output threads + t1.setDaemon(true); + t2.setDaemon(true); + t1.start(); + t2.start(); + + // wait for the process (application) to finish executing + int returnValue = process.waitFor(); + + // notify end + notifier.computationFinished(compObj); + + // make sure other two threads are done + t1.join(); + t2.join(); + + /* + * check return value. usually not very helpful to draw conclusions based on return values so don't bother. + * just provide warning in the log messages + */ + if (returnValue != 0) { + log.error("Process finished with non zero return value. 
Process may have failed"); + } else { + log.info("Process finished with return value of zero."); + } + + StringBuffer buf = new StringBuffer(); + buf.append("Executed ").append(buildCommand(cmdList)).append(" on the localHost, working directory =") + .append(context.getExecutionModel().getWorkingDir()).append("tempDirectory =") + .append(context.getExecutionModel().getTmpDir()).append("With the status") + .append(String.valueOf(returnValue)); + context.getNotificationService().info(buf.toString()); + + log.info(buf.toString()); + + context.getExecutionModel().setStdoutStr(GfacUtils.readFile(context.getExecutionModel().getStdOut())); + context.getExecutionModel().setStderrStr(GfacUtils.readFile(context.getExecutionModel().getStderr())); + + // set to context + OutputUtils.fillOutputFromStdout(invocationContext.getMessageContext("output"), context.getExecutionModel() + .getStdoutStr(), context.getExecutionModel().getStderrStr()); + + } catch (IOException e) { + throw new JobSubmissionFault(e, "", "", buildCommand(cmdList), CurrentProviders.Local); + } catch (InterruptedException e) { + throw new GfacException(e, FaultCode.LocalError); + } + + } - public void dispose(InvocationContext invocationContext) throws GfacException { + public void dispose(InvocationContext invocationContext) throws GfacException { - } + } - public void abort(InvocationContext invocationContext) throws GfacException { + public void abort(InvocationContext invocationContext) throws GfacException { - } + } } Modified: incubator/airavata/core/trunk/gfac/src/main/java/org/apache/airavata/core/gfac/provider/Provider.java URL: http://svn.apache.org/viewvc/incubator/airavata/core/trunk/gfac/src/main/java/org/apache/airavata/core/gfac/provider/Provider.java?rev=1139550&r1=1139549&r2=1139550&view=diff ============================================================================== --- incubator/airavata/core/trunk/gfac/src/main/java/org/apache/airavata/core/gfac/provider/Provider.java (original) +++ incubator/airavata/core/trunk/gfac/src/main/java/org/apache/airavata/core/gfac/provider/Provider.java Sat Jun 25 15:17:35 2011 @@ -1,3 +1,24 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ * + */ + package org.apache.airavata.core.gfac.provider; import org.apache.airavata.core.gfac.context.InvocationContext; @@ -5,11 +26,11 @@ import org.apache.airavata.core.gfac.exc public interface Provider { - void initialize(InvocationContext invocationContext) throws GfacException; + void initialize(InvocationContext invocationContext) throws GfacException; - void execute(InvocationContext invocationContext) throws GfacException; + void execute(InvocationContext invocationContext) throws GfacException; - void dispose(InvocationContext invocationContext) throws GfacException; + void dispose(InvocationContext invocationContext) throws GfacException; - void abort(InvocationContext invocationContext) throws GfacException; + void abort(InvocationContext invocationContext) throws GfacException; } Modified: incubator/airavata/core/trunk/gfac/src/main/java/org/apache/airavata/core/gfac/provider/SSHProvider.java URL: http://svn.apache.org/viewvc/incubator/airavata/core/trunk/gfac/src/main/java/org/apache/airavata/core/gfac/provider/SSHProvider.java?rev=1139550&r1=1139549&r2=1139550&view=diff ============================================================================== --- incubator/airavata/core/trunk/gfac/src/main/java/org/apache/airavata/core/gfac/provider/SSHProvider.java (original) +++ incubator/airavata/core/trunk/gfac/src/main/java/org/apache/airavata/core/gfac/provider/SSHProvider.java Sat Jun 25 15:17:35 2011 @@ -1,3 +1,24 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ * + */ + package org.apache.airavata.core.gfac.provider; import java.io.File; @@ -26,203 +47,202 @@ import edu.indiana.extreme.lead.workflow public class SSHProvider extends AbstractProvider { - private static final String SPACE = " "; - - private String buildCommand(List cmdList) { - StringBuffer buff = new StringBuffer(); - for (String string : cmdList) { - buff.append(string); - buff.append(SPACE); - } - return buff.toString(); - } - - @Override - public void initialize(InvocationContext invocationContext) throws GfacException { - ExecutionContext appExecContext = invocationContext.getExecutionContext(); - ExecutionModel model = appExecContext.getExecutionModel(); - - SSHClient ssh = new SSHClient(); - try { - ssh.loadKnownHosts(); - ssh.connect(model.getHost()); - - // TODO how to authenticate with system - ssh.authPublickey(System.getProperty("user.name")); - final Session session = ssh.startSession(); - try { - StringBuilder command = new StringBuilder(); - command.append("mkdir -p "); - command.append(model.getTmpDir()); - command.append(" | "); - command.append("mkdir -p "); - command.append(model.getWorkingDir()); - command.append(" | "); - command.append("mkdir -p "); - command.append(model.getInputDataDir()); - command.append(" | "); - command.append("mkdir -p "); - command.append(model.getOutputDataDir()); - Command cmd = session.exec(command.toString()); - cmd.join(5, TimeUnit.SECONDS); - } catch (Exception e) { - throw e; - } finally { - try { - session.close(); - } catch (Exception e) { - } - } - } catch (Exception e) { - throw new GfacException(e.getMessage(), e); - } finally { - try { - ssh.disconnect(); - } catch (Exception e) { - } - } - } - - @Override - public void execute(InvocationContext invocationContext) throws GfacException { - ExecutionContext context = invocationContext.getExecutionContext(); - ExecutionModel model = context.getExecutionModel(); - - List cmdList = new ArrayList(); - - SSHClient ssh = new SSHClient(); - try { - - /* - * Notifier - */ - NotificationService notifier = context.getNotificationService(); - - /* - * Builder Command - */ - cmdList.add(context.getExecutionModel().getExecutable()); - cmdList.addAll(context.getExecutionModel().getInputParameters()); - - // create process builder from command - String command = buildCommand(cmdList); - - //redirect StdOut and StdErr - command += SPACE + "1>" + SPACE + model.getStdOut(); - command += SPACE + "2>" + SPACE + model.getStderr(); - - // get the env of the host and the application - Map nv = context.getExecutionModel().getEnv(); - - // extra env's - nv.put(GFacConstants.INPUT_DATA_DIR, context.getExecutionModel().getInputDataDir()); - nv.put(GFacConstants.OUTPUT_DATA_DIR, context.getExecutionModel().getOutputDataDir()); - - // log info - log.info("Command = " + buildCommand(cmdList)); - for (String key : nv.keySet()) { - log.info("Env[" + key + "] = " + nv.get(key)); - } - - // notify start - DurationObj compObj = notifier.computationStarted(); - - /* - * Create ssh connection - */ - ssh.loadKnownHosts(); - ssh.connect(model.getHost()); - - // TODO how to authenticate with system - ssh.authPublickey(System.getProperty("user.name")); - - final Session session = ssh.startSession(); - try { - /* - * Build working Directory - */ - log.info("WorkingDir = " + model.getWorkingDir()); - session.exec("mkdir -p " + model.getWorkingDir()); - session.exec("cd " + model.getWorkingDir()); - - /* - * Set environment - */ - for (String key : nv.keySet()) { - session.setEnvVar(key, nv.get(key)); - } - - /* - * 
Execute - */ - Command cmd = session.exec(command); - log.info("stdout=" + GfacUtils.readFromStream(session.getInputStream())); - cmd.join(5, TimeUnit.SECONDS); - - - // notify end - notifier.computationFinished(compObj); - - /* - * check return value. usually not very helpful to draw conclusions - * based on return values so don't bother. just provide warning in - * the log messages - */ - if (cmd.getExitStatus() != 0) { - log.error("Process finished with non zero return value. Process may have failed"); - } else { - log.info("Process finished with return value of zero."); - } - - File logDir = new File("./service_logs"); - if (!logDir.exists()) { - logDir.mkdir(); - } - - // Get the Stdouts and StdErrs - QName x = QName.valueOf(invocationContext.getServiceName()); - String timeStampedServiceName = GfacUtils.createServiceDirName(x); - File localStdOutFile = new File(logDir, timeStampedServiceName + ".stdout"); - File localStdErrFile = new File(logDir, timeStampedServiceName + ".stderr"); - - SCPFileTransfer fileTransfer = ssh.newSCPFileTransfer(); - fileTransfer.download(model.getStdOut(), localStdOutFile.getAbsolutePath()); - fileTransfer.download(model.getStderr(), localStdErrFile.getAbsolutePath()); - - context.getExecutionModel().setStdoutStr(GfacUtils.readFile(localStdOutFile.getAbsolutePath())); - context.getExecutionModel().setStderrStr(GfacUtils.readFile(localStdErrFile.getAbsolutePath())); - - // set to context - OutputUtils.fillOutputFromStdout(invocationContext.getMessageContext("output"), context.getExecutionModel().getStdoutStr(), context.getExecutionModel().getStderrStr()); - - } catch (Exception e) { - throw e; - } finally { - try { - session.close(); - } catch (Exception e) { - } - } - } catch (Exception e) { - throw new GfacException(e.getMessage(), e); - } finally { - try { - ssh.disconnect(); - } catch (Exception e) { - } - } - } - - @Override - public void dispose(InvocationContext invocationContext) throws GfacException { - // TODO Auto-generated method stub - - } - - @Override - public void abort(InvocationContext invocationContext) throws GfacException { - // TODO Auto-generated method stub + private static final String SPACE = " "; + + private String buildCommand(List cmdList) { + StringBuffer buff = new StringBuffer(); + for (String string : cmdList) { + buff.append(string); + buff.append(SPACE); + } + return buff.toString(); + } + + @Override + public void initialize(InvocationContext invocationContext) throws GfacException { + ExecutionContext appExecContext = invocationContext.getExecutionContext(); + ExecutionModel model = appExecContext.getExecutionModel(); + + SSHClient ssh = new SSHClient(); + try { + ssh.loadKnownHosts(); + ssh.connect(model.getHost()); + + // TODO how to authenticate with system + ssh.authPublickey(System.getProperty("user.name")); + final Session session = ssh.startSession(); + try { + StringBuilder command = new StringBuilder(); + command.append("mkdir -p "); + command.append(model.getTmpDir()); + command.append(" | "); + command.append("mkdir -p "); + command.append(model.getWorkingDir()); + command.append(" | "); + command.append("mkdir -p "); + command.append(model.getInputDataDir()); + command.append(" | "); + command.append("mkdir -p "); + command.append(model.getOutputDataDir()); + Command cmd = session.exec(command.toString()); + cmd.join(5, TimeUnit.SECONDS); + } catch (Exception e) { + throw e; + } finally { + try { + session.close(); + } catch (Exception e) { + } + } + } catch (Exception e) { + throw new 
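/*
 * Editor's sketch (not part of this commit): the connect/exec/cleanup sequence SSHProvider
 * follows, condensed. Only calls visible in this diff are used (loadKnownHosts, connect,
 * authPublickey, startSession, exec, join, getExitStatus, close, disconnect); the sshj
 * package names in the imports are assumed, and the class and method names are illustrative.
 */
import java.util.concurrent.TimeUnit;
import net.schmizz.sshj.SSHClient;
import net.schmizz.sshj.connection.channel.direct.Session;
import net.schmizz.sshj.connection.channel.direct.Session.Command;

class SshExecSketch {
    static Integer runRemote(String host, String commandLine) throws Exception {
        SSHClient ssh = new SSHClient();
        ssh.loadKnownHosts();                                   // trust hosts from ~/.ssh/known_hosts
        ssh.connect(host);
        ssh.authPublickey(System.getProperty("user.name"));     // key-based auth, as the provider does
        try {
            Session session = ssh.startSession();
            try {
                Command cmd = session.exec(commandLine);
                cmd.join(5, TimeUnit.SECONDS);                  // bounded wait for completion
                return cmd.getExitStatus();                     // may be null if no status was received
            } finally {
                session.close();
            }
        } finally {
            ssh.disconnect();
        }
    }
}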
GfacException(e.getMessage(), e); + } finally { + try { + ssh.disconnect(); + } catch (Exception e) { + } + } + } + + @Override + public void execute(InvocationContext invocationContext) throws GfacException { + ExecutionContext context = invocationContext.getExecutionContext(); + ExecutionModel model = context.getExecutionModel(); + + List cmdList = new ArrayList(); + + SSHClient ssh = new SSHClient(); + try { + + /* + * Notifier + */ + NotificationService notifier = context.getNotificationService(); + + /* + * Builder Command + */ + cmdList.add(context.getExecutionModel().getExecutable()); + cmdList.addAll(context.getExecutionModel().getInputParameters()); + + // create process builder from command + String command = buildCommand(cmdList); + + // redirect StdOut and StdErr + command += SPACE + "1>" + SPACE + model.getStdOut(); + command += SPACE + "2>" + SPACE + model.getStderr(); + + // get the env of the host and the application + Map nv = context.getExecutionModel().getEnv(); + + // extra env's + nv.put(GFacConstants.INPUT_DATA_DIR, context.getExecutionModel().getInputDataDir()); + nv.put(GFacConstants.OUTPUT_DATA_DIR, context.getExecutionModel().getOutputDataDir()); + + // log info + log.info("Command = " + buildCommand(cmdList)); + for (String key : nv.keySet()) { + log.info("Env[" + key + "] = " + nv.get(key)); + } + + // notify start + DurationObj compObj = notifier.computationStarted(); + + /* + * Create ssh connection + */ + ssh.loadKnownHosts(); + ssh.connect(model.getHost()); + + // TODO how to authenticate with system + ssh.authPublickey(System.getProperty("user.name")); + + final Session session = ssh.startSession(); + try { + /* + * Build working Directory + */ + log.info("WorkingDir = " + model.getWorkingDir()); + session.exec("mkdir -p " + model.getWorkingDir()); + session.exec("cd " + model.getWorkingDir()); + + /* + * Set environment + */ + for (String key : nv.keySet()) { + session.setEnvVar(key, nv.get(key)); + } + + /* + * Execute + */ + Command cmd = session.exec(command); + log.info("stdout=" + GfacUtils.readFromStream(session.getInputStream())); + cmd.join(5, TimeUnit.SECONDS); + + // notify end + notifier.computationFinished(compObj); + + /* + * check return value. usually not very helpful to draw conclusions based on return values so don't + * bother. just provide warning in the log messages + */ + if (cmd.getExitStatus() != 0) { + log.error("Process finished with non zero return value. 
Process may have failed"); + } else { + log.info("Process finished with return value of zero."); + } + + File logDir = new File("./service_logs"); + if (!logDir.exists()) { + logDir.mkdir(); + } + + // Get the Stdouts and StdErrs + QName x = QName.valueOf(invocationContext.getServiceName()); + String timeStampedServiceName = GfacUtils.createServiceDirName(x); + File localStdOutFile = new File(logDir, timeStampedServiceName + ".stdout"); + File localStdErrFile = new File(logDir, timeStampedServiceName + ".stderr"); + + SCPFileTransfer fileTransfer = ssh.newSCPFileTransfer(); + fileTransfer.download(model.getStdOut(), localStdOutFile.getAbsolutePath()); + fileTransfer.download(model.getStderr(), localStdErrFile.getAbsolutePath()); + + context.getExecutionModel().setStdoutStr(GfacUtils.readFile(localStdOutFile.getAbsolutePath())); + context.getExecutionModel().setStderrStr(GfacUtils.readFile(localStdErrFile.getAbsolutePath())); + + // set to context + OutputUtils.fillOutputFromStdout(invocationContext.getMessageContext("output"), context + .getExecutionModel().getStdoutStr(), context.getExecutionModel().getStderrStr()); + + } catch (Exception e) { + throw e; + } finally { + try { + session.close(); + } catch (Exception e) { + } + } + } catch (Exception e) { + throw new GfacException(e.getMessage(), e); + } finally { + try { + ssh.disconnect(); + } catch (Exception e) { + } + } + } + + @Override + public void dispose(InvocationContext invocationContext) throws GfacException { + // TODO Auto-generated method stub + + } + + @Override + public void abort(InvocationContext invocationContext) throws GfacException { + // TODO Auto-generated method stub - } + } } Modified: incubator/airavata/core/trunk/gfac/src/main/java/org/apache/airavata/core/gfac/provider/utils/GramRSLGenerator.java URL: http://svn.apache.org/viewvc/incubator/airavata/core/trunk/gfac/src/main/java/org/apache/airavata/core/gfac/provider/utils/GramRSLGenerator.java?rev=1139550&r1=1139549&r2=1139550&view=diff ============================================================================== --- incubator/airavata/core/trunk/gfac/src/main/java/org/apache/airavata/core/gfac/provider/utils/GramRSLGenerator.java (original) +++ incubator/airavata/core/trunk/gfac/src/main/java/org/apache/airavata/core/gfac/provider/utils/GramRSLGenerator.java Sat Jun 25 15:17:35 2011 @@ -1,3 +1,24 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ * + */ + package org.apache.airavata.core.gfac.provider.utils; import java.util.Iterator; @@ -15,131 +36,132 @@ import org.ogce.schemas.gfac.documents.R import org.slf4j.Logger; import org.slf4j.LoggerFactory; +public class GramRSLGenerator { + protected final static Logger log = LoggerFactory.getLogger(GramRSLGenerator.class); + private enum JobType { + SINGLE, MPI, MULTIPLE, CONDOR + }; + + public static GramAttributes configureRemoteJob(ExecutionContext appExecContext) throws GfacException { + GramAttributes jobAttr = new GramAttributes(); + jobAttr.setExecutable(appExecContext.getExecutionModel().getExecutable()); + jobAttr.setDirectory(appExecContext.getExecutionModel().getWorkingDir()); + jobAttr.setStdout(appExecContext.getExecutionModel().getStdOut()); + jobAttr.setStderr(appExecContext.getExecutionModel().getStderr()); + + // The env here contains the env of the host and the application. i.e + // the env specified in + // the host description and application description documents + Map nv = appExecContext.getExecutionModel().getEnv(); -public class GramRSLGenerator { - protected final static Logger log = LoggerFactory.getLogger(GramRSLGenerator.class); - private enum JobType{SINGLE,MPI,MULTIPLE,CONDOR}; - - public static GramAttributes configureRemoteJob(ExecutionContext appExecContext) throws GfacException { - GramAttributes jobAttr = new GramAttributes(); - jobAttr.setExecutable(appExecContext.getExecutionModel().getExecutable()); - jobAttr.setDirectory(appExecContext.getExecutionModel().getWorkingDir()); - jobAttr.setStdout(appExecContext.getExecutionModel().getStdOut()); - jobAttr.setStderr(appExecContext.getExecutionModel().getStderr()); - - // The env here contains the env of the host and the application. i.e - // the env specified in - // the host description and application description documents - Map nv = appExecContext.getExecutionModel().getEnv(); - - for (String key : nv.keySet()) { - jobAttr.addEnvVariable(key, nv.get(key)); - } + for (String key : nv.keySet()) { + jobAttr.addEnvVariable(key, nv.get(key)); + } - jobAttr.addEnvVariable(GFacConstants.INPUT_DATA_DIR, appExecContext.getExecutionModel().getInputDataDir()); + jobAttr.addEnvVariable(GFacConstants.INPUT_DATA_DIR, appExecContext.getExecutionModel().getInputDataDir()); String outputDataDir = GFacConstants.OUTPUT_DATA_DIR; - if(!outputDataDir.isEmpty()){ - jobAttr.addEnvVariable(outputDataDir, appExecContext.getExecutionModel().getOutputDataDir()); + if (!outputDataDir.isEmpty()) { + jobAttr.addEnvVariable(outputDataDir, appExecContext.getExecutionModel().getOutputDataDir()); + } + ApplicationDescriptionType app = appExecContext.getExecutionModel().getAplicationDesc(); + WorkflowContextHeader contextHeader = appExecContext.getWorkflowHeader(); + ResourceMapping resourceMapping = null; + if (contextHeader != null) { + resourceMapping = contextHeader.getResourceMappings().getResourceMappingArray(0); + } + + log.info("Configure using App Desc = " + app.xmlText()); + if (resourceMapping != null && resourceMapping.getMaxWallTime() > 0) { + log.info("Header Setting Max Wall Time" + resourceMapping.getMaxWallTime()); + jobAttr.setMaxWallTime(resourceMapping.getMaxWallTime()); + + } else if (app.getMaxWallTime() > 0) { + log.info("Setting max wall clock time to " + app.getMaxWallTime()); + + if (app.getMaxWallTime() > 30 && app.getQueue() != null && app.getQueue().equals("debug")) { + throw new GfacException("NCSA debug Queue only support jobs < 30 minutes", FaultCode.InvalidConfig); + } + + 
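/*
 * Editor's sketch (not part of this commit): a minimal end-to-end use of GramAttributes as
 * GramRSLGenerator builds it above: set the executable, directories and streams, then
 * serialize to an RSL string for GramProvider. All setters shown appear in this diff; the
 * literal paths and values are placeholders.
 */
import org.globus.gram.GramAttributes;

class RslSketch {
    static String minimalRsl() {
        GramAttributes attrs = new GramAttributes();
        attrs.setExecutable("/bin/echo");                     // placeholder executable
        attrs.setDirectory("/tmp/airavata/work");             // working directory on the resource
        attrs.setStdout("/tmp/airavata/work/echo.stdout");
        attrs.setStderr("/tmp/airavata/work/echo.stderr");
        attrs.addArgument("hello");
        attrs.addEnvVariable("INPUT_DATA_DIR", "/tmp/airavata/input");
        attrs.setMaxWallTime(29);                             // the generator's fallback value
        attrs.setJobType(GramAttributes.JOBTYPE_SINGLE);
        return attrs.toRSL();                                 // RSL string handed to GramJob
    }
}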
jobAttr.setMaxWallTime(app.getMaxWallTime()); + jobAttr.set("proxy_timeout", "1"); + } else { + jobAttr.setMaxWallTime(29); + } + + if (appExecContext.getExecutionModel().getStdIn() != null) { + jobAttr.setStdin(appExecContext.getExecutionModel().getStdIn()); + } else { + Iterator values = appExecContext.getExecutionModel().getInputParameters().iterator(); + while (values.hasNext()) { + jobAttr.addArgument(values.next()); + } + } + + if (resourceMapping != null && resourceMapping.getNodeCount() > 0) { + log.info("Setting number of procs to " + resourceMapping.getNodeCount()); + jobAttr.set("hostCount", String.valueOf(resourceMapping.getNodeCount())); + } else if (app.getHostCount() > 1) { + jobAttr.set("hostCount", String.valueOf(app.getHostCount())); + } + if (resourceMapping != null && resourceMapping.getCpuCount() > 0) { + log.info("Setting host count to " + resourceMapping.getCpuCount()); + jobAttr.setNumProcs(resourceMapping.getCpuCount()); + + } else if (app.getCount() > 1) { + log.info("Setting number of procs to " + app.getCount()); + jobAttr.setNumProcs(app.getCount()); + } + + if (app.getProject() != null && app.getProject().getProjectName() != null) { + log.info("Setting project to " + app.getProject()); + jobAttr.setProject(app.getProject().getProjectName()); + } + + if (resourceMapping != null && resourceMapping.getQueueName() != null) { + jobAttr.setQueue(resourceMapping.getQueueName()); + } else if (app.getQueue() != null && app.getQueue().getQueueName() != null) { + log.info("Setting job queue to " + app.getQueue()); + jobAttr.setQueue(app.getQueue().getQueueName()); + } + String jobType = JobType.SINGLE.toString(); + + if (app.getJobType() != null) { + jobType = app.getJobType().toString(); + } + if (jobType.equalsIgnoreCase(JobType.SINGLE.toString())) { + log.info("Setting job type to single"); + jobAttr.setJobType(GramAttributes.JOBTYPE_SINGLE); + } else if (jobType.equalsIgnoreCase(JobType.MPI.toString())) { + log.info("Setting job type to mpi"); + jobAttr.setJobType(GramAttributes.JOBTYPE_MPI); + } else if (jobType.equalsIgnoreCase(JobType.MULTIPLE.toString())) { + log.info("Setting job type to multiple"); + jobAttr.setJobType(GramAttributes.JOBTYPE_MULTIPLE); + } else if (jobType.equalsIgnoreCase(JobType.CONDOR.toString())) { + jobAttr.setJobType(GramAttributes.JOBTYPE_CONDOR); + } + + // Support to add the Additional RSL parameters + RSLParmType[] rslParams = app.getRslparmArray(); + if (rslParams.length > 0) { + for (RSLParmType rslType : rslParams) { + log.info("Adding rsl param of [" + rslType.getName() + "," + rslType.getStringValue() + "]"); + if (rslType.getName() != "") { + jobAttr.set(rslType.getName(), rslType.getStringValue()); + } + } + } + + // support urgency/SPRUCE case + // only add spruce rsl parameter if this host has a spruce jobmanager + // configured + if (appExecContext.getWorkflowHeader() != null && appExecContext.getWorkflowHeader().getURGENCY() != null + // && GfacUtils.getSpruceGatekeeper(appExecContext) != null + ) { + jobAttr.set("urgency", appExecContext.getWorkflowHeader().getURGENCY()); } - ApplicationDescriptionType app = appExecContext.getExecutionModel().getAplicationDesc(); - WorkflowContextHeader contextHeader = appExecContext.getWorkflowHeader(); - ResourceMapping resourceMapping = null; - if (contextHeader != null) { - resourceMapping = contextHeader.getResourceMappings().getResourceMappingArray(0); - } - - log.info("Configure using App Desc = " + app.xmlText()); - if (resourceMapping != null && resourceMapping.getMaxWallTime() > 
0) { - log.info("Header Setting Max Wall Time" + resourceMapping.getMaxWallTime()); - jobAttr.setMaxWallTime(resourceMapping.getMaxWallTime()); - - } else if (app.getMaxWallTime() > 0) { - log.info("Setting max wall clock time to " + app.getMaxWallTime()); - - if (app.getMaxWallTime() > 30 && app.getQueue() != null && app.getQueue().equals("debug")) { - throw new GfacException("NCSA debug Queue only support jobs < 30 minutes", FaultCode.InvalidConfig); - } - - jobAttr.setMaxWallTime(app.getMaxWallTime()); - jobAttr.set("proxy_timeout", "1"); - } else { - jobAttr.setMaxWallTime(29); - } - - if (appExecContext.getExecutionModel().getStdIn() != null) { - jobAttr.setStdin(appExecContext.getExecutionModel().getStdIn()); - } else { - Iterator values = appExecContext.getExecutionModel().getInputParameters().iterator(); - while (values.hasNext()) { - jobAttr.addArgument(values.next()); - } - } - - if (resourceMapping != null && resourceMapping.getNodeCount() > 0) { - log.info("Setting number of procs to " + resourceMapping.getNodeCount()); - jobAttr.set("hostCount", String.valueOf(resourceMapping.getNodeCount())); - }else if (app.getHostCount() > 1) { - jobAttr.set("hostCount", String.valueOf(app.getHostCount())); - } - if (resourceMapping != null && resourceMapping.getCpuCount() > 0) { - log.info("Setting host count to " + resourceMapping.getCpuCount()); - jobAttr.setNumProcs(resourceMapping.getCpuCount()); - - } else if (app.getCount() > 1) { - log.info("Setting number of procs to " + app.getCount()); - jobAttr.setNumProcs(app.getCount()); - } - - if (app.getProject() != null && app.getProject().getProjectName() != null) { - log.info("Setting project to " + app.getProject()); - jobAttr.setProject(app.getProject().getProjectName()); - } - - if (resourceMapping != null && resourceMapping.getQueueName() != null) { - jobAttr.setQueue(resourceMapping.getQueueName()); - } else if (app.getQueue() != null && app.getQueue().getQueueName() != null) { - log.info("Setting job queue to " + app.getQueue()); - jobAttr.setQueue(app.getQueue().getQueueName()); - } - String jobType = JobType.SINGLE.toString() ; - - if (app.getJobType() != null) { - jobType = app.getJobType().toString(); - } - if (jobType.equalsIgnoreCase(JobType.SINGLE.toString())) { - log.info("Setting job type to single"); - jobAttr.setJobType(GramAttributes.JOBTYPE_SINGLE); - } else if (jobType.equalsIgnoreCase(JobType.MPI.toString())) { - log.info("Setting job type to mpi"); - jobAttr.setJobType(GramAttributes.JOBTYPE_MPI); - } else if (jobType.equalsIgnoreCase(JobType.MULTIPLE.toString())) { - log.info("Setting job type to multiple"); - jobAttr.setJobType(GramAttributes.JOBTYPE_MULTIPLE); - } else if (jobType.equalsIgnoreCase(JobType.CONDOR.toString())) { - jobAttr.setJobType(GramAttributes.JOBTYPE_CONDOR); - } - - // Support to add the Additional RSL parameters - RSLParmType[] rslParams = app.getRslparmArray(); - if (rslParams.length > 0) { - for (RSLParmType rslType : rslParams) { - log.info("Adding rsl param of [" + rslType.getName() + "," + rslType.getStringValue() + "]"); - if (rslType.getName() != "") { - jobAttr.set(rslType.getName(), rslType.getStringValue()); - } - } - } - - // support urgency/SPRUCE case - // only add spruce rsl parameter if this host has a spruce jobmanager - // configured - if (appExecContext.getWorkflowHeader() != null && appExecContext.getWorkflowHeader().getURGENCY() != null - //&& GfacUtils.getSpruceGatekeeper(appExecContext) != null - ) { - jobAttr.set("urgency", 
appExecContext.getWorkflowHeader().getURGENCY()); - } - return jobAttr; - } + return jobAttr; + } } Modified: incubator/airavata/core/trunk/gfac/src/main/java/org/apache/airavata/core/gfac/provider/utils/JobSubmissionListener.java URL: http://svn.apache.org/viewvc/incubator/airavata/core/trunk/gfac/src/main/java/org/apache/airavata/core/gfac/provider/utils/JobSubmissionListener.java?rev=1139550&r1=1139549&r2=1139550&view=diff ============================================================================== --- incubator/airavata/core/trunk/gfac/src/main/java/org/apache/airavata/core/gfac/provider/utils/JobSubmissionListener.java (original) +++ incubator/airavata/core/trunk/gfac/src/main/java/org/apache/airavata/core/gfac/provider/utils/JobSubmissionListener.java Sat Jun 25 15:17:35 2011 @@ -1,3 +1,24 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + * + */ + package org.apache.airavata.core.gfac.provider.utils; import org.apache.airavata.core.gfac.context.ExecutionContext; @@ -12,82 +33,82 @@ import org.ietf.jgss.GSSException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; - public class JobSubmissionListener implements GramJobListener { - private boolean finished; - private int error; - private int status; - private ExecutionContext executionContext; - private GramJob job; - protected final Logger log = LoggerFactory.getLogger(JobSubmissionListener.class); - - public JobSubmissionListener(GramJob job, ExecutionContext executionContext) { - this.job = job; - this.executionContext = executionContext; - } - // waits for DONE or FAILED status - public void waitFor() throws InterruptedException,GSSException, GfacException, GramException { - while (!finished) { - int proxyExpTime = job.getCredentials().getRemainingLifetime(); - if(proxyExpTime < 900){ - log.info("Job proxy expired. 
Trying to renew proxy"); - GSSCredential newgssCred = ((GSISecurityContext)executionContext.getSecurityContext()).getGssCredentails(); - job.renew(newgssCred); - } - // job status is changed but method isn't invoked - if (status != 0) { - if (job.getStatus() != status) { - log.info("invoke method manually"); - statusChanged(job); - } else { - log.info("job " + job.getIDAsString() + " have same status: " + GramJob.getStatusAsString(status)); - } - } else { - log.info("Status is zero"); - } - - synchronized (this) { - wait(60 * 1000l); - } - } - } - - public synchronized void statusChanged(GramJob job) { - int jobStatus = job.getStatus(); - String jobId = job.getIDAsString(); - String statusString = job.getStatusAsString(); - String jobStatusMessage = GfacUtils.formatJobStatus(jobId, statusString); + private boolean finished; + private int error; + private int status; + private ExecutionContext executionContext; + private GramJob job; + protected final Logger log = LoggerFactory.getLogger(JobSubmissionListener.class); + + public JobSubmissionListener(GramJob job, ExecutionContext executionContext) { + this.job = job; + this.executionContext = executionContext; + } + + // waits for DONE or FAILED status + public void waitFor() throws InterruptedException, GSSException, GfacException, GramException { + while (!finished) { + int proxyExpTime = job.getCredentials().getRemainingLifetime(); + if (proxyExpTime < 900) { + log.info("Job proxy expired. Trying to renew proxy"); + GSSCredential newgssCred = ((GSISecurityContext) executionContext.getSecurityContext()) + .getGssCredentails(); + job.renew(newgssCred); + } + // job status is changed but method isn't invoked + if (status != 0) { + if (job.getStatus() != status) { + log.info("invoke method manually"); + statusChanged(job); + } else { + log.info("job " + job.getIDAsString() + " have same status: " + GramJob.getStatusAsString(status)); + } + } else { + log.info("Status is zero"); + } + + synchronized (this) { + wait(60 * 1000l); + } + } + } + + public synchronized void statusChanged(GramJob job) { + int jobStatus = job.getStatus(); + String jobId = job.getIDAsString(); + String statusString = job.getStatusAsString(); + String jobStatusMessage = GfacUtils.formatJobStatus(jobId, statusString); log.info(jobStatusMessage); status = jobStatus; - executionContext.getNotificationService().info(jobStatusMessage); - if (jobStatus == GramJob.STATUS_DONE) { - finished = true; - } else if (jobStatus == GramJob.STATUS_FAILED) { - finished = true; - error = job.getError(); - log.info("Job Error Code: " + error); - } - - if (finished) { - notify(); - } - } - - - public int getError() { - return error; - } - - public int getStatus() { - return status; - } - - public void wakeup() { - try { - notify(); - } catch (Exception e) { - e.printStackTrace(); - } - } + executionContext.getNotificationService().info(jobStatusMessage); + if (jobStatus == GramJob.STATUS_DONE) { + finished = true; + } else if (jobStatus == GramJob.STATUS_FAILED) { + finished = true; + error = job.getError(); + log.info("Job Error Code: " + error); + } + + if (finished) { + notify(); + } + } + + public int getError() { + return error; + } + + public int getStatus() { + return status; + } + + public void wakeup() { + try { + notify(); + } catch (Exception e) { + e.printStackTrace(); + } + } } Modified: incubator/airavata/core/trunk/gfac/src/main/java/org/apache/airavata/core/gfac/registry/RegistryService.java URL: 
http://svn.apache.org/viewvc/incubator/airavata/core/trunk/gfac/src/main/java/org/apache/airavata/core/gfac/registry/RegistryService.java?rev=1139550&r1=1139549&r2=1139550&view=diff ============================================================================== --- incubator/airavata/core/trunk/gfac/src/main/java/org/apache/airavata/core/gfac/registry/RegistryService.java (original) +++ incubator/airavata/core/trunk/gfac/src/main/java/org/apache/airavata/core/gfac/registry/RegistryService.java Sat Jun 25 15:17:35 2011 @@ -1,3 +1,23 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + * + */ package org.apache.airavata.core.gfac.registry; @@ -5,42 +25,53 @@ import javax.xml.namespace.QName; import org.apache.airavata.core.gfac.exception.GfacException; - public interface RegistryService { - public void registerConcreteWsdl(String wsdlAsStr,int lifetimeAsSeconds)throws GfacException; - public String getConcreateWsdl(String wsdlQName)throws GfacException; - public void removeConcreteWsdl(String wsdlQName)throws GfacException; - + public void registerConcreteWsdl(String wsdlAsStr, int lifetimeAsSeconds) throws GfacException; + + public String getConcreateWsdl(String wsdlQName) throws GfacException; + + public void removeConcreteWsdl(String wsdlQName) throws GfacException; + public String getAbstractWsdl(String wsdlQName) throws GfacException; + public void removeAwsdl(String wsdlQName) throws GfacException; - - public void registerServiceMap(String serviceMapAsStr,String abstractWsdlAsString)throws GfacException; - public void removeServiceMap(String serviceQName)throws GfacException; - public String getServiceMap(String serviceQName)throws GfacException; - - public void registerHostDesc(String hostDescAsStr)throws GfacException; - public String getHostDesc(String hostName)throws GfacException; - public void removeHostDesc(String hostName)throws GfacException; - - public void registerAppDesc(String appDescAsStr)throws GfacException; - public String getAppDesc(String appQName,String hostName)throws GfacException; - public void removeAppDesc(String appQName,String hostName)throws GfacException; - - public void registerOutputFiles(QName resourceId, String resourceName, String resourceType, - String resourceDesc, String resourceDocument, String resourceParentTypedID, - String owner)throws GfacException; - - public String[] findService(String serviceName)throws GfacException; - public String[] findServiceDesc(String serviceName)throws GfacException; + + public void registerServiceMap(String serviceMapAsStr, String abstractWsdlAsString) throws GfacException; + + public void removeServiceMap(String serviceQName) throws GfacException; + + public String getServiceMap(String serviceQName) throws GfacException; + + public void registerHostDesc(String 
hostDescAsStr) throws GfacException; + + public String getHostDesc(String hostName) throws GfacException; + + public void removeHostDesc(String hostName) throws GfacException; + + public void registerAppDesc(String appDescAsStr) throws GfacException; + + public String getAppDesc(String appQName, String hostName) throws GfacException; + + public void removeAppDesc(String appQName, String hostName) throws GfacException; + + public void registerOutputFiles(QName resourceId, String resourceName, String resourceType, String resourceDesc, + String resourceDocument, String resourceParentTypedID, String owner) throws GfacException; + + public String[] findService(String serviceName) throws GfacException; + + public String[] findServiceDesc(String serviceName) throws GfacException; + public String[] findAppDesc(String query) throws GfacException; - - public String[] listHosts()throws GfacException; - public String[] listApps()throws GfacException; - public String[] app2Hosts(String appName)throws GfacException; - public String[] listAwsdl()throws GfacException; - + + public String[] listHosts() throws GfacException; + + public String[] listApps() throws GfacException; + + public String[] app2Hosts(String appName) throws GfacException; + + public String[] listAwsdl() throws GfacException; + public boolean isAuthorizedToAcsses(String resourceID, String actor, String action) throws GfacException; - -} +}
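
For readers following this revision, the sketch below is an illustrative (uncommitted) example of how the pieces touched here are meant to fit together: a caller resolves descriptor documents through the RegistryService interface reformatted above, builds an ExecutionContext from them, and hands that context to GramRSLGenerator.configureRemoteJob(...) to obtain the GramAttributes used for GRAM submission. Only the RegistryService and GramRSLGenerator signatures are taken from this diff; the GramSubmissionSketch class, the constructor-injected registry, and the lookupExecutionContext(...) helper are assumptions added purely for illustration.

import javax.xml.namespace.QName;

import org.apache.airavata.core.gfac.context.ExecutionContext;
import org.apache.airavata.core.gfac.exception.GfacException;
import org.apache.airavata.core.gfac.provider.utils.GramRSLGenerator;
import org.apache.airavata.core.gfac.registry.RegistryService;
import org.globus.gram.GramAttributes;

/**
 * Illustrative sketch only; not part of r1139550.
 * Shows one plausible call sequence over the interfaces in this commit.
 */
public class GramSubmissionSketch {

    // Any RegistryService implementation may be injected here (assumption).
    private final RegistryService registry;

    public GramSubmissionSketch(RegistryService registry) {
        this.registry = registry;
    }

    public GramAttributes buildJobAttributes(QName serviceName, String hostName) throws GfacException {
        // Resolve the descriptor documents the GFac providers rely on
        // (both methods appear in the RegistryService diff above).
        String hostDesc = registry.getHostDesc(hostName);
        String appDesc = registry.getAppDesc(serviceName.toString(), hostName);

        // HYPOTHETICAL helper: populating an ExecutionContext from the raw
        // descriptor documents is not shown in this diff.
        ExecutionContext context = lookupExecutionContext(hostDesc, appDesc);

        // Real call from this commit: derives wall time, queue, job type,
        // additional RSL parameters and SPRUCE urgency from the context.
        return GramRSLGenerator.configureRemoteJob(context);
    }

    private ExecutionContext lookupExecutionContext(String hostDesc, String appDesc) {
        throw new UnsupportedOperationException("illustration only");
    }
}

Keeping configureRemoteJob as a static utility, as this revision does, concentrates the wall-time, queue, job-type and RSL defaults in one place, so providers such as GramProvider only have to supply a populated ExecutionContext and then attach a JobSubmissionListener to the resulting job.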