Package org.pentaho.di.core.logging

Examples of org.pentaho.di.core.logging.LogWriter
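
All of the examples below use the legacy singleton LogWriter from Kettle (Pentaho Data Integration) 2.x/3.x; Kettle 4.0 and later replaced it with the LogChannel API. As orientation, here is a minimal sketch of the calls the snippets rely on. The class name LogWriterSketch, the file name "Kettle-example", and the messages are illustrative, not taken from the original sources.

import org.pentaho.di.core.logging.LogWriter;

public class LogWriterSketch {
  public static void main(String[] args) {
    // The (filename, exact) overload routes output to a named log file, as in
    // the first example below; the no-argument getInstance() reuses the
    // already-initialized singleton, as in the later examples.
    LogWriter log = LogWriter.getInstance("Kettle-example", false);
    try {
      // The first argument is a "subject" tag (the examples pass toString()),
      // the second is the message. logDetailed() is only emitted when the log
      // level is Detailed or more verbose; logError() whenever logging is on.
      log.logBasic("LogWriterSketch", "start of processing");
      log.logDetailed("LogWriterSketch", "visible at Detailed level only");
      log.logError("LogWriterSketch", "something went wrong");
    } finally {
      log.close(); // release the log file the writer created
    }
  }
}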



    final String[] params = fillArguments(parameters);

    // Sadly Kettle always insists on creating a log-file. There is no way around it (yet).
    final LogWriter logWriter = LogWriter.getInstance("Kettle-reporting-datasource", false);
    try
    {
      final Repository repository = connectToRepository(logWriter);
      try
      {
        final TransMeta transMeta = loadTransformation(repository, resourceManager, resourceKey);
        transMeta.setArguments(params);
        final Trans trans = new Trans(transMeta);
        for (int i = 0; i < definedVariableNames.length; i++)
        {
          final ParameterMapping mapping = definedVariableNames[i];
          final String sourceName = mapping.getName();
          final String variableName = mapping.getAlias();
          final Object value = parameters.get(sourceName);
          if (value != null)
          {
            trans.setParameterValue(variableName, String.valueOf(value));
          }
        }

        transMeta.setInternalKettleVariables();
        trans.prepareExecution(transMeta.getArguments());

        // Attach a listener to the step whose output feeds the report, so its
        // rows can be collected into a TableModel.
        TableProducer tableProducer = null;
        final List stepList = trans.getSteps();
        for (int i = 0; i < stepList.size(); i++)
        {
          final StepMetaDataCombi metaDataCombi = (StepMetaDataCombi) stepList.get(i);
          if (!stepName.equals(metaDataCombi.stepname))
          {
            continue;
          }
          final RowMetaInterface row = transMeta.getStepFields(stepName);
          tableProducer = new TableProducer(row, queryLimit, stopOnError);
          metaDataCombi.step.addRowListener(tableProducer);
          break;
        }

        if (tableProducer == null)
        {
          throw new ReportDataFactoryException("Cannot find the specified transformation step " + stepName);
        }

        // Run the transformation and block until every step has finished.
        currentlyRunningTransformation = trans;
        trans.startThreads();
        trans.waitUntilFinished();
        trans.cleanup();
        return tableProducer.getTableModel();
      }
      finally
      {
        currentlyRunningTransformation = null;
        if (repository != null)
        {
          repository.disconnect();
        }
      }
    }
    finally
    {
      logWriter.close();
    }
  }
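
The TableProducer used above is not shown on this page; it works by implementing Kettle's RowListener interface, which receives every row the chosen step reads, writes, or diverts to error handling. A bare-bones collector along the same lines might look like this (the class RowCollector is a hypothetical illustration, not the real TableProducer):

import java.util.ArrayList;
import java.util.List;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.trans.step.RowListener;

public class RowCollector implements RowListener {
  private final List<Object[]> rows = new ArrayList<Object[]>();

  public void rowReadEvent(RowMetaInterface rowMeta, Object[] row) {
    // rows entering the step; not needed when capturing output
  }

  public void rowWrittenEvent(RowMetaInterface rowMeta, Object[] row) {
    rows.add(row); // every row the step writes to its output(s)
  }

  public void errorRowWrittenEvent(RowMetaInterface rowMeta, Object[] row) {
    // rows diverted to the step's error handling; ignored in this sketch
  }

  public List<Object[]> getRows() {
    return rows;
  }
}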



  public Result execute(Result prev_result, int nr, Repository rep, Job parentJob) {
    LogWriter log = LogWriter.getInstance();

    Result result = new Result(nr);
    result.setResult(false);

    log.logDetailed(toString(), "Start of processing");

    // Resolve ${VARIABLE} references in the configured settings.
    String realS3Bucket = environmentSubstitute(S3Bucket);
    String realAccessKey = environmentSubstitute(AccessKey);
    String realPrivateKey = environmentSubstitute(PrivateKey);
    String realFilenameToSend = environmentSubstitute(FilenameToSend);
    SendToS3Job proc = new SendToS3Job(realAccessKey, realPrivateKey, realS3Bucket, realFilenameToSend);

    try {
      proc.process();
      result.setResult(true);
    } catch (Exception e) {
      result.setNrErrors(1);
      e.printStackTrace();
      log.logError(toString(), "Error processing SendToS3Job : " + e.getMessage());
    }

    return result;
  }
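
For reference, the SendToS3Job helper above could also be driven directly, outside a Kettle job; the wrapper class and all literal values below are placeholders:

public class SendToS3Demo {
  public static void main(String[] args) {
    // Constructor argument order follows the call in execute() above:
    // access key, private key, bucket, file to send.
    SendToS3Job job = new SendToS3Job("myAccessKey", "myPrivateKey", "my-bucket", "/tmp/report.csv");
    try {
      job.process(); // throws on any failure
    } catch (Exception e) {
      // execute() above maps this to result.setNrErrors(1) and logs the message
      e.printStackTrace();
    }
  }
}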

 
  public void process() throws Exception {

    LogWriter log = LogWriter.getInstance();
    log.logDetailed(toString(), "Sending file to S3 Job    ");
    log.logDetailed(toString(), "_____________________________________");
    log.logDetailed(toString(), "Access Key\t : " + _AKey);
    log.logDetailed(toString(), "Private Key\t : " + "Keep it secret !");
    log.logDetailed(toString(), "End  Bucket\t : " + _S3Buck);
    log.logDetailed(toString(), "Filename\t : " + _FileNm);
    log.logDetailed(toString(), "_____________________________________");
   
    SendToS3();
  }

 
  public void SendToS3() throws Exception {

    LogWriter log = LogWriter.getInstance();

    // Authenticate and connect to S3 through the JetS3t REST implementation.
    AWSCredentials awsCredentials = new AWSCredentials(_AKey, _PKey);
    S3Service s3Service = new RestS3Service(awsCredentials);

    S3Bucket[] myBuckets = s3Service.listAllBuckets();
    if (myBuckets != null) {
      log.logDetailed(toString(), "Connected to S3!");
      log.logDetailed(toString(), "_____________________________________");
      log.logDetailed(toString(), "==>You have " + myBuckets.length + " Buckets in your S3");
    }
    log.logDetailed(toString(), "==>You will send the file [" + _FileNm + "] to the bucket [" + _S3Buck + "]");

    String targetBucket = _S3Buck;

    File fileData = new File(_FileNm);
    S3Object fileObject = new S3Object(fileData);
    log.logDetailed(toString(), "==>Hash value: " + fileObject.getMd5HashAsHex());
    log.logDetailed(toString(), "==>S3Object before upload: " + fileObject);
    log.logDetailed(toString(), "_____________________________________");

    // Upload the file object.
    log.logDetailed(toString(), "Sending file to S3 ...");
    s3Service.putObject(targetBucket, fileObject);
    log.logDetailed(toString(), "_____________________________________");
    log.logDetailed(toString(), "==>S3Object after upload: " + fileObject);
  }


