Package org.apache.airavata.gfac.core.handler

Examples of org.apache.airavata.gfac.core.handler.GFacHandlerException
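
GFacHandlerException is the checked exception that GFac handlers throw when a step such as directory setup, security-context creation, or data staging fails. As a minimal, self-contained sketch (the StagingExample class and its stageFile method are hypothetical and not part of Airavata; only the message-only and message-plus-cause constructors that appear in the examples below are assumed), a handler-style helper might wrap a lower-level failure like this:

import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;

import org.apache.airavata.gfac.core.handler.GFacHandlerException;

// Hypothetical helper class, shown only to illustrate the exception-wrapping
// pattern used by the handlers below.
public class StagingExample {

    public static void stageFile(String sourcePath, String targetDir) throws GFacHandlerException {
        File dir = new File(targetDir);
        if (!dir.isDirectory() && !dir.mkdirs()) {
            // Message-only constructor, as in makeFileSystemDir below.
            throw new GFacHandlerException("Cannot create directory " + targetDir);
        }
        try {
            // Copy the input file into the target directory.
            Files.copy(Paths.get(sourcePath),
                    Paths.get(targetDir, new File(sourcePath).getName()),
                    StandardCopyOption.REPLACE_EXISTING);
        } catch (IOException e) {
            // Message-plus-cause constructor, as used throughout the handlers below.
            throw new GFacHandlerException("Error while staging file " + sourcePath, e);
        }
    }
}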


    private void makeFileSystemDir(String dir, JobExecutionContext jobExecutionContext) throws GFacHandlerException {
        File f = new File(dir);
        if (f.isDirectory()) {
            // Directory already exists; nothing to do.
            return;
        } else if (!f.mkdir()) {
            // Reuse f instead of constructing a second File for the same path.
            throw new GFacHandlerException("Cannot create directory " + dir);
        }
    }
View Full Code Here


                TaskDetails taskData = null;
                try {
                    taskData = (TaskDetails) jobExecutionContext.getRegistry().get(RegistryModelType.TASK_DETAIL, jobExecutionContext.getTaskData().getTaskID());
                } catch (RegistryException e) {
                    log.error("Error retrieving job details from Registry");
                    throw new GFacHandlerException("Error retrieving job details from Registry", e);
                }
                JobDetails jobDetails = taskData.getJobDetailsList().get(0);
                String jobDescription = jobDetails.getJobDescription();
                if (jobDescription != null) {
                    JobDescriptor jobDescriptor = null;
                    try {
                        jobDescriptor = JobDescriptor.fromXML(jobDescription);
                    } catch (XmlException e1) {
                        // Rethrow instead of swallowing the parse failure; jobDescriptor would be null below otherwise.
                        throw new GFacHandlerException("Error parsing job descriptor XML: " + e1.getMessage(), e1);
                    }
                    applicationDeploymentDescription.getType().setScratchWorkingDirectory(
                            jobDescriptor.getJobDescriptorDocument().getJobDescriptor().getWorkingDirectory());
                    applicationDeploymentDescription.getType().setInputDataDirectory(jobDescriptor.getInputDirectory());
                    applicationDeploymentDescription.getType().setOutputDataDirectory(jobDescriptor.getOutputDirectory());
                    applicationDeploymentDescription.getType().setStandardError(jobDescriptor.getJobDescriptorDocument().getJobDescriptor().getStandardErrorFile());
                    applicationDeploymentDescription.getType().setStandardOutput(jobDescriptor.getJobDescriptorDocument().getJobDescriptor().getStandardOutFile());
                }
            }
        }
        try {
            if (jobExecutionContext.getSecurityContext(GSISecurityContext.GSI_SECURITY_CONTEXT) == null) {

                GFACGSISSHUtils.addSecurityContext(jobExecutionContext);
            }
        } catch (ApplicationSettingsException e) {
            log.error(e.getMessage());
            throw new GFacHandlerException("Error while creating SSHSecurityContext", e, e.getLocalizedMessage());
        } catch (GFacException e) {
            log.error(e.getMessage());
            throw new GFacHandlerException("Error while creating SSHSecurityContext", e, e.getLocalizedMessage());
        }
        super.invoke(jobExecutionContext);
        DataTransferDetails detail = new DataTransferDetails();
        TransferStatus status = new TransferStatus();

        ApplicationDeploymentDescriptionType app = jobExecutionContext.getApplicationContext()
                .getApplicationDeploymentDescription().getType();
        try {
            // Both branches of the original null check retrieved the same GSI security context,
            // so a single lookup is sufficient here.
            Cluster cluster = ((GSISecurityContext) jobExecutionContext
                    .getSecurityContext(GSISecurityContext.GSI_SECURITY_CONTEXT)).getPbsCluster();
            if (cluster == null) {
                throw new GFacProviderException("Security context is not set properly");
            } else {
                log.info("Successfully retrieved the Security Context");
            }

            // Get the Stdouts and StdErrs
            String timeStampedServiceName = GFacUtils.createUniqueNameForService(jobExecutionContext.getServiceName());

            TaskDetails taskData = jobExecutionContext.getTaskData();
            String outputDataDir = null;
            File localStdOutFile;
            File localStdErrFile;

            if (taskData.getAdvancedOutputDataHandling() != null) {
                outputDataDir = taskData.getAdvancedOutputDataHandling().getOutputDataDir();
            }
            if (outputDataDir == null) {
                outputDataDir = File.separator + "tmp";
            }
            outputDataDir = outputDataDir + File.separator + jobExecutionContext.getExperimentID() + "-" + jobExecutionContext.getTaskData().getTaskID();
            (new File(outputDataDir)).mkdirs();


            localStdOutFile = new File(outputDataDir + File.separator + timeStampedServiceName + "stdout");
            localStdErrFile = new File(outputDataDir + File.separator + timeStampedServiceName + "stderr");
//            cluster.makeDirectory(outputDataDir);
            cluster.scpFrom(app.getStandardOutput(), localStdOutFile.getAbsolutePath());
            Thread.sleep(1000);
            cluster.scpFrom(app.getStandardError(), localStdErrFile.getAbsolutePath());
            Thread.sleep(1000);

            String stdOutStr = GFacUtils.readFileToString(localStdOutFile.getAbsolutePath());
            String stdErrStr = GFacUtils.readFileToString(localStdErrFile.getAbsolutePath());
            status.setTransferState(TransferState.COMPLETE);
            detail.setTransferStatus(status);
            detail.setTransferDescription("STDOUT:" + stdOutStr);
            registry.add(ChildDataType.DATA_TRANSFER_DETAIL, detail, jobExecutionContext.getTaskData().getTaskID());

            status.setTransferState(TransferState.COMPLETE);
            detail.setTransferStatus(status);
            detail.setTransferDescription("STDERR:" + stdErrStr);
            registry.add(ChildDataType.DATA_TRANSFER_DETAIL, detail, jobExecutionContext.getTaskData().getTaskID());

            //todo this is a mess we have to fix this
            List<DataObjectType> outputArray = new ArrayList<DataObjectType>();
            Map<String, Object> output = jobExecutionContext.getOutMessageContext().getParameters();
            Set<String> keys = output.keySet();
            for (String paramName : keys) {
                ActualParameter actualParameter = (ActualParameter) output.get(paramName);
                if ("URI".equals(actualParameter.getType().getType().toString())) {

                    List<String> outputList = cluster.listDirectory(app.getOutputDataDirectory());
                    if (outputList.size() == 0 || outputList.get(0).isEmpty()) {
                        OutputUtils.fillOutputFromStdout(output, stdOutStr, stdErrStr, outputArray);
                        Set<String> strings = output.keySet();
                        outputArray.clear();
                        for (String key : strings) {
                            ActualParameter actualParameter1 = (ActualParameter) output.get(key);
                            if ("URI".equals(actualParameter1.getType().getType().toString())) {
                                String downloadFile = MappingFactory.toString(actualParameter1);
                                cluster.scpFrom(downloadFile, outputDataDir);
                                String fileName = downloadFile.substring(downloadFile.lastIndexOf(File.separatorChar) + 1);
                                String localFile = outputDataDir + File.separator + fileName;
                                jobExecutionContext.addOutputFile(localFile);
                                MappingFactory.fromString(actualParameter1, localFile);
                                DataObjectType dataObjectType = new DataObjectType();
                                dataObjectType.setValue(localFile);
                                dataObjectType.setKey(key);
                                dataObjectType.setType(DataType.URI);
                                outputArray.add(dataObjectType);
                            }
                        }
                        break;
                    } else {
                        String valueList = outputList.get(0);
                        cluster.scpFrom(app.getOutputDataDirectory() + File.separator + valueList, outputDataDir);
                        jobExecutionContext.addOutputFile(outputDataDir + File.separator + valueList);
                        DataObjectType dataObjectType = new DataObjectType();
                        dataObjectType.setValue(valueList);
                        dataObjectType.setKey(paramName);
                        dataObjectType.setType(DataType.URI);
                        outputArray.add(dataObjectType);
                    }
                } else {
                    OutputUtils.fillOutputFromStdout(output, stdOutStr, stdErrStr, outputArray);
                    break;
                }
            }
            if (outputArray == null || outputArray.isEmpty()) {
                throw new GFacHandlerException(
                        "Empty Output returned from the Application, Double check the application"
                                + "and ApplicationDescriptor output Parameter Names");
            }
            app.setStandardError(localStdErrFile.getAbsolutePath());
            app.setStandardOutput(localStdOutFile.getAbsolutePath());
            app.setOutputDataDirectory(outputDataDir);
            status.setTransferState(TransferState.DOWNLOAD);
            detail.setTransferStatus(status);
            detail.setTransferDescription(outputDataDir);
            registry.add(ChildDataType.DATA_TRANSFER_DETAIL, detail, jobExecutionContext.getTaskData().getTaskID());
            registry.add(ChildDataType.EXPERIMENT_OUTPUT, outputArray, jobExecutionContext.getExperimentID());
        } catch (XmlException e) {
            throw new GFacHandlerException("Cannot read output:" + e.getMessage(), e);
        } catch (ConnectionException e) {
            throw new GFacHandlerException(e.getMessage(), e);
        } catch (TransportException e) {
            throw new GFacHandlerException(e.getMessage(), e);
        } catch (IOException e) {
            throw new GFacHandlerException(e.getMessage(), e);
        } catch (Exception e) {
            try {
                status.setTransferState(TransferState.FAILED);
                detail.setTransferStatus(status);
                detail.setTransferDescription(e.getLocalizedMessage());
                registry.add(ChildDataType.DATA_TRANSFER_DETAIL, detail, jobExecutionContext.getTaskData().getTaskID());
                GFacUtils.saveErrorDetails(jobExecutionContext, e.getLocalizedMessage(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.FILE_SYSTEM_FAILURE);
            } catch (Exception e1) {
                throw new GFacHandlerException("Error persisting status", e1, e1.getLocalizedMessage());
            }
            throw new GFacHandlerException("Error in retrieving results", e);
        }

    }
View Full Code Here

            if (jobExecutionContext.getSecurityContext(SSHSecurityContext.SSH_SECURITY_CONTEXT) == null) {
                try {
                    GFACSSHUtils.addSecurityContext(jobExecutionContext);
                } catch (ApplicationSettingsException e) {
                    log.error(e.getMessage());
                    throw new GFacHandlerException("Error while creating SSHSecurityContext", e, e.getLocalizedMessage());
                }
            }
            ApplicationDeploymentDescriptionType app = jobExecutionContext.getApplicationContext()
                    .getApplicationDeploymentDescription().getType();
            String standardError = app.getStandardError();
            String standardOutput = app.getStandardOutput();
            String outputDataDirectory = app.getOutputDataDirectory();
            super.invoke(jobExecutionContext);
            AuthenticationInfo authenticationInfo = null;
            if (password != null) {
                authenticationInfo = new DefaultPasswordAuthenticationInfo(this.password);
            } else {
                authenticationInfo = new DefaultPublicKeyFileAuthentication(this.publicKeyPath, this.privateKeyPath,
                        this.passPhrase);
            }
            // Server info
            ServerInfo serverInfo = new ServerInfo(this.userName, this.hostName);

            Cluster pbsCluster = new PBSCluster(serverInfo, authenticationInfo, CommonUtils.getPBSJobManager("/opt/torque/torque-4.2.3.1/bin/"));
            outputPath = outputPath + File.separator + jobExecutionContext.getExperimentID() + "-" + jobExecutionContext.getTaskData().getTaskID()
                    + File.separator;
            pbsCluster.makeDirectory(outputPath);
            pbsCluster.scpTo(outputPath, standardError);
            pbsCluster.scpTo(outputPath, standardOutput);
            List<DataObjectType> outputArray = new ArrayList<DataObjectType>();
            Map<String, Object> output = jobExecutionContext.getOutMessageContext().getParameters();
            Set<String> keys = output.keySet();
            for (String paramName : keys) {
                ActualParameter actualParameter = (ActualParameter) output.get(paramName);
                if ("URI".equals(actualParameter.getType().getType().toString())) {
                    String downloadFile = MappingFactory.toString(actualParameter);
                    pbsCluster.scpTo(outputPath, downloadFile);
                    String fileName = downloadFile.substring(downloadFile.lastIndexOf(File.separatorChar) + 1);
                    DataObjectType dataObjectType = new DataObjectType();
                    dataObjectType.setValue(outputPath + File.separatorChar + fileName);
                    dataObjectType.setKey(paramName);
                    dataObjectType.setType(DataType.URI);
                    outputArray.add(dataObjectType);
                }
            }
            registry.add(ChildDataType.EXPERIMENT_OUTPUT, outputArray, jobExecutionContext.getExperimentID());
        } catch (SSHApiException e) {
            log.error("Error transfering files to remote host : " + hostName + " with the user: " + userName);
            log.error(e.getMessage());
            throw new GFacHandlerException(e);
        } catch (Exception e) {
            throw new GFacHandlerException(e);
        }
    }
View Full Code Here

            isWhirrBasedDeployment = true;
        } else {
            String hadoopConfigDirPath = (String) inMessageContext.getParameter("HADOOP_CONFIG_DIR");
            File hadoopConfigDir = new File(hadoopConfigDirPath);
            if (!hadoopConfigDir.exists()) {
                throw new GFacHandlerException("Specified Hadoop configuration directory doesn't exist.");
            } else if (FileUtils.listFiles(hadoopConfigDir, null, null).isEmpty()) {
                throw new GFacHandlerException("Cannot find any Hadoop configuration files inside the specified directory.");
            }

            this.hadoopConfigDir = hadoopConfigDir;
        }

        if(jobExecutionContext.isInPath()){
            try {
                handleInPath(jobExecutionContext);
            } catch (IOException e) {
                throw new GFacHandlerException("Error while copying input data from local file system to HDFS.",e);
            }
        } else {
            handleOutPath(jobExecutionContext);
        }
    }
View Full Code Here

                File hadoopSiteXML = launchHadoopCluster(hostDescription, tempDirectory);
                jobExecutionContext.getInMessageContext().addParameter("HADOOP_SITE_XML", hadoopSiteXML.getAbsolutePath());
                jobExecutionContext.getInMessageContext().addParameter("HADOOP_DEPLOYMENT_TYPE", "WHIRR");
                // TODO: Add hadoop-site.xml to job execution context.
            } catch (IOException e) {
                throw new GFacHandlerException("IO Error while processing configurations.",e);
            } catch (ConfigurationException e) {
                throw  new GFacHandlerException("Whirr configuration error.", e);
            } catch (InterruptedException e) {
                throw new GFacHandlerException("Hadoop cluster launch interrupted.", e);
            } catch (TransformerException e) {
                throw new GFacHandlerException("Error while creating hadoop-site.xml", e);
            } catch (ParserConfigurationException e) {
                throw new GFacHandlerException("Error while creating hadoop-site.xml", e);
            }
        } else {
            jobExecutionContext.getInMessageContext().addParameter("HADOOP_DEPLOYMENT_TYPE",
                    "MANUAL");
            jobExecutionContext.getInMessageContext().addParameter("HADOOP_CONFIG_DIR",
View Full Code Here

        if(hadoopHostDesc.isSetWhirrConfiguration()){
            HadoopHostType.WhirrConfiguration whirrConfig = hadoopHostDesc.getWhirrConfiguration();
            if(whirrConfig.isSetConfigurationFile()){
                File whirrConfigFile = new File(whirrConfig.getConfigurationFile());
                if(!whirrConfigFile.exists()){
                    throw new GFacHandlerException(
                            "Specified whirr configuration file doesn't exists.");
                }

                FileUtils.copyFileToDirectory(whirrConfigFile, workingDirectory);

                return new File(workingDirectory, whirrConfigFile.getName());
            } else if(whirrConfig.isSetConfiguration()){
                Properties whirrConfigProps =
                        whirrConfigurationsToProperties(whirrConfig.getConfiguration());
                File whirrConfigFile = new File(workingDirectory, "whirr-hadoop.config");
                whirrConfigProps.store(
                        new FileOutputStream(whirrConfigFile), null);

                return whirrConfigFile;
            }
        }

        throw new GFacHandlerException("Cannot find Whirr configurations. Whirr configuration "
                + "is required if you don't have already running Hadoop deployment.");
    }
View Full Code Here

            if (jobExecutionContext.getSecurityContext(SSHSecurityContext.SSH_SECURITY_CONTEXT) == null) {
                try {
                    GFACSSHUtils.addSecurityContext(jobExecutionContext);
                } catch (ApplicationSettingsException e) {
                    log.error(e.getMessage());
                    throw new GFacHandlerException("Error while creating SSHSecurityContext", e, e.getLocalizedMessage());
                }
            }
            log.info("Invoking SCPInputHandler");
            super.invoke(jobExecutionContext);


            MessageContext input = jobExecutionContext.getInMessageContext();
            Set<String> parameters = input.getParameters().keySet();
            for (String paramName : parameters) {
                ActualParameter actualParameter = (ActualParameter) input.getParameters().get(paramName);
                String paramValue = MappingFactory.toString(actualParameter);
                //TODO: Review this with type
                if ("URI".equals(actualParameter.getType().getType().toString())) {
                    ((URIParameterType) actualParameter.getType()).setValue(stageInputFiles(jobExecutionContext, paramValue));
                } else if ("URIArray".equals(actualParameter.getType().getType().toString())) {
                    List<String> split = Arrays.asList(StringUtil.getElementsFromString(paramValue));
                    List<String> newFiles = new ArrayList<String>();
                    for (String paramValueEach : split) {
                        String stageInputFiles = stageInputFiles(jobExecutionContext, paramValueEach);
                        status.setTransferState(TransferState.UPLOAD);
                        detail.setTransferStatus(status);
                        detail.setTransferDescription("Input Data Staged: " + stageInputFiles);
                        registry.add(ChildDataType.DATA_TRANSFER_DETAIL, detail, jobExecutionContext.getTaskData().getTaskID());
                        newFiles.add(stageInputFiles);
                    }
                    ((URIArrayType) actualParameter.getType()).setValueArray(newFiles.toArray(new String[newFiles.size()]));
                }
                inputNew.getParameters().put(paramName, actualParameter);
            }
        } catch (Exception e) {
            log.error(e.getMessage());
            status.setTransferState(TransferState.FAILED);
            detail.setTransferStatus(status);
            try {
                GFacUtils.saveErrorDetails(jobExecutionContext, e.getLocalizedMessage(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.FILE_SYSTEM_FAILURE);
                registry.add(ChildDataType.DATA_TRANSFER_DETAIL, detail, jobExecutionContext.getTaskData().getTaskID());
            } catch (Exception e1) {
                throw new GFacHandlerException("Error persisting status", e1, e1.getLocalizedMessage());
            }
            throw new GFacHandlerException("Error while input File Staging", e, e.getLocalizedMessage());
        }
        jobExecutionContext.setInMessageContext(inputNew);
    }
View Full Code Here

            }
            cluster.scpTo(targetFile, paramValue);
            }
            return targetFile;
        } catch (SSHApiException e) {
            throw new GFacHandlerException("Error while input File Staging", e, e.getLocalizedMessage());
        }
    }
View Full Code Here

            if (jobExecutionContext.getSecurityContext(SSHSecurityContext.SSH_SECURITY_CONTEXT) == null) {
                GFACSSHUtils.addSecurityContext(jobExecutionContext);
            }
        } catch (ApplicationSettingsException e) {
            log.error(e.getMessage());
            throw new GFacHandlerException("Error while creating SSHSecurityContext", e, e.getLocalizedMessage());
        } catch (GFacException e) {
            log.error(e.getMessage());
            // Rethrow instead of swallowing the failure, matching the other handlers above.
            throw new GFacHandlerException("Error while creating SSHSecurityContext", e, e.getLocalizedMessage());
        }

        log.info("Setup SSH job directorties");
View Full Code Here

