Package eu.planets_project.tb.impl.model.exec

Examples of eu.planets_project.tb.impl.model.exec.BatchExecutionRecordImpl
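The snippets on this page show how the Planets Testbed creates, populates and reads back BatchExecutionRecordImpl instances. As an overview, here is a minimal sketch of that lifecycle, using only the constructor and setters visible in the snippets below (import locations outside the exec package are assumed): a batch record is built against the experiment's executable, stamped with start and end dates, given one ExecutionRecordImpl per processed digital object, and flagged as succeeded or failed.

    import java.util.ArrayList;
    import java.util.Calendar;
    import java.util.List;

    import eu.planets_project.tb.impl.model.ExperimentExecutableImpl; // package assumed
    import eu.planets_project.tb.impl.model.exec.BatchExecutionRecordImpl;
    import eu.planets_project.tb.impl.model.exec.ExecutionRecordImpl;

    public class BatchRecordLifecycleSketch {

        public static BatchExecutionRecordImpl recordBatch(ExperimentExecutableImpl executable) {
            // Construct the batch record against the experiment's executable.
            BatchExecutionRecordImpl batch = new BatchExecutionRecordImpl(executable);
            batch.setStartDate(Calendar.getInstance());

            // One ExecutionRecordImpl per processed digital object.
            List<ExecutionRecordImpl> runs = new ArrayList<ExecutionRecordImpl>();
            ExecutionRecordImpl run = new ExecutionRecordImpl(batch);
            run.setDigitalObjectReferenceCopy("planets://example/input-digo"); // hypothetical reference
            runs.add(run);
            batch.setRuns(runs);

            // Close the record and flag the overall outcome.
            batch.setEndDate(Calendar.getInstance());
            batch.setBatchRunSucceeded(true);
            return batch;
        }
    }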


    public void deleteMeasurementEvent() {
        log.info("Deleting MeasurementEvent "+this.getId());
        TestbedManagerImpl tbm = (TestbedManagerImpl) JSFUtil.getManagedObject("TestbedManager");
        // Now update experiment.
        ExperimentBean expBean = (ExperimentBean)JSFUtil.getManagedObject("ExperimentBean");
        BatchExecutionRecordImpl batch = expBean.getExperiment().getExperimentExecutable().getBatchExecutionRecords().iterator().next();
        ExecutionRecordImpl run = batch.getRuns().iterator().next();
       
        // Remove the Event itself:
        ExperimentPersistencyRemote db = tbm.getExperimentPersistencyRemote();
        setTargetInvocation(null);
        setTargetExecution(null);
View Full Code Here


        return executionRecords;
    }
  

    public boolean getHasExecuted() {
        BatchExecutionRecordImpl batch = this.getBatch();
        if( batch == null ) return false;
        if( batch.getStartDate() == null ) return false;
        return true;
    }
View Full Code Here

   
    /**
     * @return the report from the batch processor level:
     */
    public String getBatchReport() {
        BatchExecutionRecordImpl batch = this.getBatch();
        if( batch == null || batch.getWorkflowExecutionLog() == null ) return "No batch report logged.";
        return batch.getWorkflowExecutionLog().getSerializedWorkflowResult();
    }
View Full Code Here
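The two helpers above only differ in which field of the first batch record they inspect. A hedged, stand-alone equivalent, assuming the accessors shown elsewhere on this page (getBatchExecutionRecords() returning a Set, plus getStartDate(), getWorkflowExecutionLog() and getSerializedWorkflowResult()):

    import java.util.Set;

    import eu.planets_project.tb.impl.model.ExperimentExecutableImpl; // package assumed
    import eu.planets_project.tb.impl.model.exec.BatchExecutionRecordImpl;

    public class BatchRecordQueries {

        /** True once a first batch record exists and carries a start date. */
        public static boolean hasExecuted(ExperimentExecutableImpl executable) {
            BatchExecutionRecordImpl batch = firstBatch(executable);
            return batch != null && batch.getStartDate() != null;
        }

        /** The serialized workflow result of the first batch, or a fallback message. */
        public static String batchReport(ExperimentExecutableImpl executable) {
            BatchExecutionRecordImpl batch = firstBatch(executable);
            if (batch == null || batch.getWorkflowExecutionLog() == null) {
                return "No batch report logged.";
            }
            return batch.getWorkflowExecutionLog().getSerializedWorkflowResult();
        }

        private static BatchExecutionRecordImpl firstBatch(ExperimentExecutableImpl executable) {
            Set<BatchExecutionRecordImpl> records = executable.getBatchExecutionRecords();
            if (records == null || records.isEmpty()) return null;
            return records.iterator().next();
        }
    }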

      log.info("processing WEEBatchExperiment: wfResult = null -> processing notify_WorkflowFailed");
      this.processNotify_WorkflowFailed(expID, "WorkflowResult not available");
      return;
    }
    //create a BatchExecutionRecord
    BatchExecutionRecordImpl batchRecord = new BatchExecutionRecordImpl( (ExperimentExecutableImpl) exp.getExperimentExecutable() );
    //startTime
    Calendar c1 = new GregorianCalendar();
    c1.setTimeInMillis(weeWFResult.getStartTime());
    batchRecord.setStartDate(c1);
   
    //endTime
    Calendar c2 = new GregorianCalendar();
    c2.setTimeInMillis(weeWFResult.getEndTime());
    batchRecord.setEndDate(c2);
 
    BatchWorkflowResultLogImpl wfResultLog = new BatchWorkflowResultLogImpl();
    try {
      //try serializing the workflow result log- as this is the way it needs to be stored
      String wfResultxml = JaxbUtil.marshallObjectwithJAXB(WorkflowResult.class, weeWFResult);
      log.debug("Successfully serialized the workflowResult Log via Jaxb" );
      //store the wfResultLog in the db model bean
      wfResultLog.setSerializedWorkflowResult(wfResultxml);
    } catch (Exception e) {
      log.debug("Problems serializing wfResultLog object",e);
      this.processNotify_WorkflowFailed(expID, "WorkflowResult not serializable");
      return;
    }
   
    batchRecord.setWorkflowExecutionLog(wfResultLog);
    batchRecord.setBatchRunSucceeded(true);
   
    //now iterate over the results and extract and store all created digos
    List<ExecutionRecordImpl> execRecords = new ArrayList<ExecutionRecordImpl>();
   
    //group related wfResult items per input digital objects
    Map<URI,List<WorkflowResultItem>> structuredResults = this.getAllWFResultItemsPerInputDigo(weeWFResult);
    //FIXME AL: We still need to create empty executionRecords for the items that weren't processed by the wee (e.g. expSetup.getInputData and compare to the log)
    for(URI inputDigoURI : structuredResults.keySet()){
      int actionCounter = 0;
      ExecutionRecordImpl execRecord = new ExecutionRecordImpl(batchRecord);
      //the input Digo for all this information is about
      // FIXME This appears to be the resolved URI, not the proper Planets DR URI:
      execRecord.setDigitalObjectReferenceCopy(inputDigoURI+"");
      Properties p = new Properties();
      //iterate over the results and document the migration action - all other information goes into properties.
      for(WorkflowResultItem wfResultItem : structuredResults.get(inputDigoURI)){
       
        //1. check if this record was about the migration action
        String action = wfResultItem.getSActionIdentifier();
        if(action.startsWith(WorkflowResultItem.SERVICE_ACTION_MIGRATION)){
          URI outputDigoRef = wfResultItem.getOutputDigitalObjectRef();
          if(outputDigoRef!=null){
            //DigitalObject outputDigo = dataRegistry.retrieve(outputDigoRef);
            //1.a download the ResultDigo into the TB and store its reference - if it's the final migration producing the output object
            if(action.equals(WorkflowResultItem.SERVICE_ACTION_FINAL_MIGRATION)){
              //documenting the final output object
              URI tbUri = execRecord.setDigitalObjectResult(outputDigoRef, exp);
              //FIXME: currently not possible to mix DIGO and PROPERTY result:
              p.put(ExecutionRecordImpl.RESULT_PROPERTY_URI, tbUri.toString());
            }
            else{
            //1.b documenting the interim results in a multi-migration-workflow
              //DataHandler dh = new DataHandlerImpl();
                  //URI tbUri = dh.storeDigitalObject(outputDigo, exp);
                  p.put(ExecutionRecordImpl.RESULT_PROPERTY_INTERIM_RESULT_URI+"["+actionCounter+"]", outputDigoRef.toString());
            }
            Calendar start = new GregorianCalendar();
            start.setTimeInMillis(wfResultItem.getStartTime());
            execRecord.setStartDate(start);
            Calendar end = new GregorianCalendar();
            end.setTimeInMillis(wfResultItem.getEndTime());
            execRecord.setEndDate(end);
          }
        }
     
        //1b. every service action gets persisted as a stage record
        ExecutionStageRecordImpl stageRecord = fillInExecutionStageRecord(wfResultItem,actionCounter,execRecord,action,exp.getEntityID());
        execRecord.getStages().add(stageRecord);
       
        //2. or about some general reporting information
        if(action.startsWith(WorkflowResultItem.GENERAL_WORKFLOW_ACTION)){
          execRecord.setReportLog(this.parseReportLog(wfResultItem));
        }

        //3. document all other metadata for actions: identification, etc. as properties over all actions
        try{
          this.updateProperties(actionCounter, p, wfResultItem);
        }catch(Exception e){
          log.error("processing WEEBatchExperiment: Problems crating execution record properties for a workflowResultItem "+e);
        }
        actionCounter++;
      }
      try {
        execRecord.setPropertiesListResult(p);
      } catch (IOException e) {
        log.debug("processing WEEBatchExperiment: Problem adding properties to executionRecord: "+e);
      }
     
      //got all information - now add the record for this inputDigo
      log.info("processing WEEBatchExperiment: Adding an execution record: "+inputDigoURI);
      execRecords.add(execRecord);
    }
    batchRecord.setRuns(execRecords);

    this.helperUpdateExpWithBatchRecord(exp, batchRecord);
  }
View Full Code Here
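The WEE result reports its timings as epoch milliseconds, while the record setters above expect Calendar instances, so the same conversion is repeated for the batch-level and per-item start and end times. A small helper sketch (plain java.util, no Planets API beyond the setters already shown):

    import java.util.Calendar;
    import java.util.GregorianCalendar;

    // Convert a WEE epoch-millisecond timestamp into the Calendar the records expect.
    static Calendar toCalendar(long epochMillis) {
        Calendar c = new GregorianCalendar();
        c.setTimeInMillis(epochMillis);
        return c;
    }

    // Usage, mirroring the snippet above:
    //   batchRecord.setStartDate(toCalendar(weeWFResult.getStartTime()));
    //   batchRecord.setEndDate(toCalendar(weeWFResult.getEndTime()));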

   * @param expID
   * @param failureReason
   */
  public void processNotify_WorkflowFailed(long expID,String failureReason){
    Experiment exp = testbedMan.getExperiment(expID);
    BatchExecutionRecordImpl batchRecord = new BatchExecutionRecordImpl((ExperimentExecutableImpl)exp.getExperimentExecutable());
    batchRecord.setBatchRunSucceeded(false);
   
    this.helperUpdateExpWithBatchRecord(exp, batchRecord);
    //TODO AL: any more fields/events/measurements to extract?
  }
View Full Code Here
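A failed notification only flags the new batch record as unsuccessful; it never receives dates or runs. Consumers therefore guard on isBatchRunSucceeded() before touching getRuns(), as the aggregation snippet further down does. A minimal sketch of that guard, using only accessors shown on this page:

    // Hedged sketch: only read runs out of batches that actually succeeded.
    if (batch != null && batch.isBatchRunSucceeded() && batch.getRuns() != null) {
        for (ExecutionRecordImpl run : batch.getRuns()) {
            // inspect run.getDigitalObjectReferenceCopy(), run.getStartDate(), run.getEndDate(), ...
        }
    }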

        job.setStatus(TestbedBatchJob.RUNNING);
        job.setPercentComplete(0);
        job.setStartDate(Calendar.getInstance());
        Experiment exp = edao.findExperiment(job.getExpID());
        // Set up the DB:
        BatchExecutionRecordImpl batch = this.createExperimentBatch(job, exp);
       
        try {
            // FIXME, Some experiment types may take all DOBs into one workflow?  Emulation?
           
            // Set up the basics:
            DataHandler dh = new DataHandlerImpl();
            int total = job.getDigitalObjects().size();
            int i = 0;
           
           
            // Process each in turn:
            for( String filename : job.getDigitalObjects() ) {
                Calendar start = Calendar.getInstance();
                log.info("Running job: "+(i+1)+"/"+total);
                DigitalObject dob = dh.get(filename).getDigitalObject();
                WorkflowResult wfr = null;
               
                // Actually run the workflow:
                try {
                    wfr = this.executeWorkflowOn(job, dob);
                    job.setWorkflowResult(filename, wfr);
                } catch( Exception e ) {
                    e.printStackTrace();
                }
               
                // Report:
                if( wfr != null ) {
                    // Patch in the start and end dates:
                    wfr.setStartDate(start);
                    wfr.setEndDate( Calendar.getInstance());
                    // Inspect the report:
                    if( wfr.getReportLog() != null ) {
                        log.info("Got report: " + wfr.getReportLog());
                    }
                    // Is there a result?
                    if( wfr.getResult() != null ) {
                        log.info("Got result: "+wfr.getResult().toString());
                    }
                }

                // Store results in the database:
                this.storeWorkflowResults(job, wfr, dob, filename, batch, exp );
               
                log.info("Ran job: "+(i+1)+"/"+total);
                // Update counter:
                i++;
                job.setPercentComplete((int)(100.0*i/total));
            }
           
            // Record that all went well:
            log.info("Status: DONE - All went well.");
            // Set the job status:
            job.setStatus(TestbedBatchJob.DONE);
            job.setPercentComplete(100);
            job.setEndDate(Calendar.getInstance());
            // Record batch info:
            exp.getExperimentExecutable().setExecutionSuccess(true);
            batch.setBatchRunSucceeded(true);
        } catch( Exception e ) {
            job.setStatus(TestbedBatchJob.FAILED);
            job.setPercentComplete(100);
            job.setEndDate(Calendar.getInstance());
            log.error("Job failed, with exception: "+e);
            batch.setBatchRunSucceeded(false);
            exp.getExperimentExecutable().setExecutionSuccess(false);
            e.printStackTrace();
        }

        // Record general information:
        batch.setEndDate(job.getEndDate());
        exp.getExperimentExecutable().setExecutableInvoked(true);
        exp.getExperimentExecutable().setExecutionCompleted(true);
        exp.getExperimentExecutable().setExecutionEndDate(Calendar.getInstance().getTimeInMillis());
        exp.getExperimentExecution().setEndDate(Calendar.getInstance());
        exp.getExperimentExecution().setState(Experiment.STATE_COMPLETED);
View Full Code Here
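Each digital object runs inside its own try/catch, so a single failing workflow invocation is logged and the loop moves on to the next object; the outer catch only fires when the batch machinery itself fails, and in that case both the TestbedBatchJob and the BatchExecutionRecordImpl are marked as failed. A condensed, hedged sketch of that error-handling shape (runWorkflow() is a hypothetical stand-in for this.executeWorkflowOn(job, dob)):

    // Condensed sketch of the per-item versus batch-level error handling above.
    try {
        int i = 0;
        int total = job.getDigitalObjects().size();
        for (String filename : job.getDigitalObjects()) {
            try {
                runWorkflow(filename);                     // one item failing...
            } catch (Exception e) {
                log.error("Item failed: " + filename, e);  // ...is logged...
            }
            i++;
            job.setPercentComplete((int) (100.0 * i / total)); // ...and the batch carries on.
        }
        batch.setBatchRunSucceeded(true);
    } catch (Exception e) {
        // Only failures of the batch machinery itself end up here.
        batch.setBatchRunSucceeded(false);
        job.setStatus(TestbedBatchJob.FAILED);
    }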

    /**
     * @param job
     * @return
     */
    private BatchExecutionRecordImpl createExperimentBatch(TestbedBatchJob job, Experiment exp) {
        BatchExecutionRecordImpl batch = new BatchExecutionRecordImpl((ExperimentExecutableImpl) exp.getExperimentExecutable());
        batch.setStartDate(job.getStartDate());
        log.info("Adding results of job "+job.getExpID()+" ("+job.getStatus()+") to experiment "+exp.getEntityID()+".");
        exp.getExperimentExecutable().getBatchExecutionRecords().add(batch);
        //FIXME edao.updateExperiment(exp);
       
        return batch;
View Full Code Here

        // Populate using the results:
        Set<BatchExecutionRecordImpl> records = getExperiment().getExperimentExecutable().getBatchExecutionRecords();
        log.info("Found batch list: "+records);
        if( records != null && records.size() > 0 ) {
            log.info("Found batches: "+records.size());
            BatchExecutionRecordImpl batch = records.iterator().next();
            for( ExecutionRecordImpl exr : batch.getRuns() ) {
                log.info("Found result: "+exr.getResultType());
                ResultsForDigitalObjectBean res = new ResultsForDigitalObjectBean(exr.getDigitalObjectReferenceCopy());
                results.add(res);
                // Collate successes:
                runOnes.add( normaliseDataReference(exr.getDigitalObjectReferenceCopy()) );
View Full Code Here

        // Look for data:
        double size_total = 0.0;
        double thru_time_total = 0.0;
        for( Experiment exp : experiments ) {
            if( exp.getExperimentExecutable().getBatchExecutionRecords().size() > 0 ) {
                BatchExecutionRecordImpl ber = exp.getExperimentExecutable().getBatchExecutionRecords().iterator().next();
                if( ber.isBatchRunSucceeded() ) {
                    for( ExecutionRecordImpl run : ber.getRuns() ) {
                        Double time_s = null;
                        if( run.getStartDate() != null && run.getEndDate() !=null ) {
                            time_s = ( run.getEndDate().getTimeInMillis() - run.getStartDate().getTimeInMillis() ) / 1000.0;
                        }
                        // Look for digital object:
View Full Code Here
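The loop above accumulates total data volume and total wall-clock time across the runs of a successful batch; the per-run duration is derived from the execution record's start and end dates. A hedged sketch of the resulting throughput figure, where sizeOfRunInMb() is a hypothetical stand-in for the digital-object size lookup elided at the end of the snippet:

    // Hedged sketch: per-run seconds from the record dates, then an average throughput.
    double size_total = 0.0;
    double thru_time_total = 0.0;
    for (ExecutionRecordImpl run : ber.getRuns()) {
        if (run.getStartDate() != null && run.getEndDate() != null) {
            double time_s = (run.getEndDate().getTimeInMillis()
                    - run.getStartDate().getTimeInMillis()) / 1000.0;
            thru_time_total += time_s;
            size_total += sizeOfRunInMb(run); // hypothetical size lookup
        }
    }
    double throughputMbPerSecond = (thru_time_total > 0.0) ? size_total / thru_time_total : 0.0;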

            log.info("Batch ExecutionRecords == null");
            return 0;
        } else {
            log.info("Batch ExecutionRecords #"+this.getBatchExecutionRecords().size());
            if( this.getBatchExecutionRecords().size() > 0 ) {
                BatchExecutionRecordImpl b = this.getBatchExecutionRecords().iterator().next();
                if( b.getRuns() != null ) {
                    log.info("Batch ExecutionRecord.get(1).getRuns() #"+b.getRuns().size());
                }
            }
            return this.getBatchExecutionRecords().size();
        }
    }
View Full Code Here
