Package com.xuggle.xuggler

Examples of com.xuggle.xuggler.IStreamCoder
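IStreamCoder is Xuggler's wrapper around an FFmpeg codec context. For decoding you normally take the pre-configured coder returned by IStream.getStreamCoder(), as most of the examples below do; for encoding you can also build one from scratch with IStreamCoder.make(IStreamCoder.Direction, ICodec), in which case every parameter has to be set by hand. A minimal sketch of that second path (the codec choice and parameter values are illustrative assumptions, not taken from the examples on this page; the usual com.xuggle.xuggler imports are assumed):

    // Build an audio encoder by hand instead of taking it from an existing stream.
    ICodec codec = ICodec.findEncodingCodec(ICodec.ID.CODEC_ID_MP3);
    IStreamCoder coder = IStreamCoder.make(IStreamCoder.Direction.ENCODING, codec);
    coder.setSampleRate(44100);   // assumed values; a real program would copy them
    coder.setChannels(2);         // from an input coder or take them from the caller
    coder.setBitRate(128000);
    if (coder.open(null, null) < 0)
      throw new RuntimeException("could not open encoder");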



    protected static IStreamCoder createCoder(IStream stream,
        ICodec videoFormat, int height, int width, int framerate) {
        // get streams coder
        IStreamCoder coder = stream.getStreamCoder();
        // set codec for the stream
        coder.setCodec(videoFormat);
        // framerate
        IRational fps = IRational.make(framerate);
        // timebase = 1/fps
        coder.setTimeBase(IRational.make(fps.getDenominator(), fps
            .getNumerator()));
        // pixel format comes from a field defined elsewhere in the class
        // (typically IPixelFormat.Type.YUV420P)
        coder.setPixelType(pixelformat);
        // set resolution
        coder.setHeight(height);
        coder.setWidth(width);
        // set some flags
        coder.setFlag(IStreamCoder.Flags.FLAG_QSCALE, true);
        coder.setFlag(IStreamCoder.Flags.FLAG_GLOBAL_HEADER, true);

        if (coder.getCodecID() == ICodec.ID.CODEC_ID_H264)
            applyX264Settings(coder);

        return coder;
    }
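The helper above only configures the coder; the surrounding code still has to add the stream to an output container, open the coder, and write the container header before any frames can be encoded. A hedged sketch of such a caller (outputUrl, the 1280x720 resolution and the 25 fps value are assumptions, not part of the original class):

    IContainer container = IContainer.make();
    if (container.open(outputUrl, IContainer.Type.WRITE, null) < 0)
        throw new RuntimeException("could not open output: " + outputUrl);

    ICodec h264 = ICodec.findEncodingCodec(ICodec.ID.CODEC_ID_H264);
    IStream stream = container.addNewStream(0);
    // note the parameter order of the helper: height before width
    IStreamCoder coder = createCoder(stream, h264, 720, 1280, 25);

    if (coder.open(null, null) < 0)
        throw new RuntimeException("could not open video encoder");
    if (container.writeHeader() < 0)
        throw new RuntimeException("could not write container header");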


            statistic.dataRead(packet.getSize());

            if (coder == null) {
                int streamIndex = packet.getStreamIndex();
                IStreamCoder newCoder = container.getStream(streamIndex)
                    .getStreamCoder();
                if (newCoder.getCodecType() == ICodec.Type.CODEC_TYPE_VIDEO) {
                    videoStreamIndex = streamIndex;
                    coder = newCoder;
                    if ((errorNumber = coder.open()) < 0) {
                        videoSharingSession.reportError(new DecodingException(
                            IError.make(errorNumber).getDescription()));
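Once the lazily-created coder has been opened, the same packet can be pushed into it. A minimal sketch of that decoding step, reusing coder and packet from the fragment above (error handling is reduced to a plain exception here rather than the videoSharingSession reporting used above):

    IVideoPicture picture = IVideoPicture.make(coder.getPixelType(),
        coder.getWidth(), coder.getHeight());
    int offset = 0;
    while (offset < packet.getSize()) {
        int bytesDecoded = coder.decodeVideo(picture, packet, offset);
        if (bytesDecoded < 0)
            throw new RuntimeException(IError.make(bytesDecoded).getDescription());
        offset += bytesDecoded;
        if (picture.isComplete()) {
            // a complete frame is now available for rendering or conversion
        }
    }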

        IContainer container = IContainer.make();
        XugglerException.throwIfInError(container.open(encoded, format));

        logger.trace("Looking for first audio stream in container");
        int streamId = -1;
        IStreamCoder audioCoder = null;
        int numStreams = container.getNumStreams();
        for (int num = 0; num < numStreams; num++) {
            IStream stream = container.getStream(num);
            IStreamCoder coder = stream.getStreamCoder();
            if (coder.getCodecType() == ICodec.Type.CODEC_TYPE_AUDIO) {
                streamId = num;
                audioCoder = coder;
                break;
            }
        }
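The loop above only locates the stream; before any packets can be decoded the code must confirm a stream was found and open the coder. A hedged sketch of that next step (plain RuntimeExceptions are used here instead of the XugglerException helper seen above):

    if (audioCoder == null)
        throw new RuntimeException("no audio stream found in container");
    int rv = audioCoder.open(null, null);
    if (rv < 0)
        throw new RuntimeException("could not open audio decoder: "
            + IError.make(rv).getDescription());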

    // query how many streams the call to open found
    int numStreams = container.getNumStreams();
   
    // and iterate through the streams to find the first audio stream
    int audioStreamId = -1;
    IStreamCoder audioCoder = null;
    for(int i = 0; i < numStreams; i++)
    {
      // Find the stream object
      IStream stream = container.getStream(i);
      // Get the pre-configured decoder that can decode this stream.
      IStreamCoder coder = stream.getStreamCoder();
     
      if (coder.getCodecType() == ICodec.Type.CODEC_TYPE_AUDIO)
      {
        audioStreamId = i;
        audioCoder = coder;
        break;
      }
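With the decoder located and opened, decoding follows the standard Xuggler pattern: read packets from the container, skip packets that belong to other streams, and feed the rest to decodeAudio until each packet is fully consumed. A minimal sketch (audioStreamId, audioCoder and container come from the loop above; the coder is assumed to already be open):

    IPacket packet = IPacket.make();
    IAudioSamples samples = IAudioSamples.make(1024, audioCoder.getChannels());
    while (container.readNextPacket(packet) >= 0)
    {
      if (packet.getStreamIndex() != audioStreamId)
        continue;                       // packet belongs to some other stream
      int offset = 0;
      while (offset < packet.getSize())
      {
        int bytesDecoded = audioCoder.decodeAudio(samples, packet, offset);
        if (bytesDecoded < 0)
          throw new RuntimeException("error decoding audio");
        offset += bytesDecoded;
        if (samples.isComplete())
        {
          // a full set of samples is ready: write it, play it, or re-encode it
        }
      }
    }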

                int numStreams = container.getNumStreams();

                int audioStreamId = -1;
                IStreamCoder audioCoder = null;
                for(int i = 0; i < numStreams; i++)
                {
                    IStream stream = container.getStream(i);
                    IStreamCoder coder = stream.getStreamCoder();

                    if (coder.getCodecType() == ICodec.Type.CODEC_TYPE_AUDIO)
                    {
                        audioStreamId = i;
                        audioCoder = coder;
                        audioCoder.setBitRate(container.getBitRate());
                        break;
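Decoded IAudioSamples are plain PCM, so they can be handed straight to the Java Sound API once the decoder is open. A hedged sketch of that playback path (this is the usual DecodeAndPlayAudio-style pattern, not code from the class above; 16-bit signed little-endian output and the javax.sound.sampled imports are assumed, and samples stands for a completed IAudioSamples from a decode loop like the one sketched earlier):

    AudioFormat javaFormat = new AudioFormat(audioCoder.getSampleRate(),
        16, audioCoder.getChannels(), true, false);
    DataLine.Info info = new DataLine.Info(SourceDataLine.class, javaFormat);
    // getLine and open may throw LineUnavailableException;
    // the enclosing method has to handle or declare it
    SourceDataLine line = (SourceDataLine) AudioSystem.getLine(info);
    line.open(javaFormat);
    line.start();

    // later, inside the decode loop, once samples.isComplete() is true:
    byte[] raw = samples.getData().getByteArray(0, samples.getSize());
    line.write(raw, 0, raw.length);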

       *
       * You can create IStreamCoders yourself using
       * IStreamCoder#make(IStreamCoder.Direction), but then you have to set all
       * parameters yourself.
       */
      IStreamCoder ic = is.getStreamCoder();

      /**
       * Find out what Codec Xuggler guessed the input stream was encoded with.
       */
      ICodec.Type cType = ic.getCodecType();

      mIStreams[i] = is;
      mICoders[i] = ic;
      mOStreams[i] = null;
      mOCoders[i] = null;
      mASamplers[i] = null;
      mVSamplers[i] = null;
      mIVideoPictures[i] = null;
      mOVideoPictures[i] = null;
      mISamples[i] = null;
      mOSamples[i] = null;

      if (cType == ICodec.Type.CODEC_TYPE_AUDIO && mHasAudio
          && (astream == -1 || astream == i))
      {
         /**
         * First, did the user specify an audio codec?
         */
        ICodec codec = null;
        if (acodec != null)
        {
          /**
           * Looks like they did specify one; let's look it up by name.
           */
          codec = ICodec.findEncodingCodecByName(acodec);
          if (codec == null || codec.getType() != cType)
            throw new RuntimeException("could not find encoder: " + acodec);
        }
        else
        {
          /**
           * Looks like the user didn't specify an output coder for audio.
           *
           * So we ask Xuggler to guess an appropriate output codec based on the
           * URL, container format, and that it's audio.
           */
          codec = ICodec.guessEncodingCodec(oFmt, null, outputURL, null,
              cType);
          if (codec == null)
            throw new RuntimeException("could not guess " + cType
                + " encoder for: " + outputURL);
        }
        /**
         * So it looks like this stream is an audio stream. Now we add an audio
         * stream to the output container that we will use to encode our
         * resampled audio.
         */
        IStream os = mOContainer.addNewStream(codec);

        /**
         * And we ask the IStream for an appropriately configured IStreamCoder
         * for output.
         *
         * Unfortunately you still need to specify a lot of things for
         * outputting (because we can't really guess what you want to encode
         * as).
         */
        IStreamCoder oc = os.getStreamCoder();

        mOStreams[i] = os;
        mOCoders[i] = oc;

       /**
         * Now let's see if the codec can support the input sample format; if not
         * we pick the last sample format the codec supports.
         */
        Format preferredFormat = ic.getSampleFormat();
       
        List<Format> formats = codec.getSupportedAudioSampleFormats();
        for(Format format : formats) {
          oc.setSampleFormat(format);
          if (format == preferredFormat)
            break;
        }

        final String apreset = cmdLine.getOptionValue("apreset");
        if (apreset != null)
          Configuration.configure(apreset, oc);
       
        /**
         * In general an IStreamCoder encoding audio needs to know: 1) An ICodec
         * to use. 2) The sample rate and number of channels of the audio. Most
         * everything else can be defaulted.
         */

        /**
         * If the user didn't specify a sample rate to encode as, then just use
         * the same sample rate as the input.
         */
        if (sampleRate == 0)
          sampleRate = ic.getSampleRate();
        oc.setSampleRate(sampleRate);
        /**
         * If the user didn't specify a bit rate to encode as, then just use the
         * same bit rate as the input.
         */
        if (abitrate == 0)
          abitrate = ic.getBitRate();
        if (abitrate == 0)
          // some containers don't give a bit-rate
          abitrate = 64000;
        oc.setBitRate(abitrate);
       
        /**
         * If the user didn't specify the number of channels to encode audio as,
         * just assume we're keeping the same number of channels.
         */
        if (channels == 0)
          channels = ic.getChannels();
        oc.setChannels(channels);

        /**
         * And set the quality (which defaults to 0, or highest, if the user
         * doesn't tell us one).
         */
        oc.setGlobalQuality(aquality);

        /**
         * Now check if our output channels or sample rate differ from our input
         * channels or sample rate.
         *
         * If they do, we're going to need to resample the input audio to be in
         * the right format to output.
         */
        if (oc.getChannels() != ic.getChannels()
            || oc.getSampleRate() != ic.getSampleRate()
            || oc.getSampleFormat() != ic.getSampleFormat())
        {
          /**
           * Create an audio resampler to do that job.
           */
          mASamplers[i] = IAudioResampler.make(oc.getChannels(), ic
              .getChannels(), oc.getSampleRate(), ic.getSampleRate(),
              oc.getSampleFormat(), ic.getSampleFormat());
          if (mASamplers[i] == null)
          {
            throw new RuntimeException(
                "could not open audio resampler for stream: " + i);
          }
        }
        else
        {
          mASamplers[i] = null;
        }
        /**
         * Finally, create some buffers for the input and output audio
         * themselves.
         *
         * We'll use these repeatedly during the #run(CommandLine) method.
         */
        mISamples[i] = IAudioSamples.make(1024, ic.getChannels(), ic.getSampleFormat());
        mOSamples[i] = IAudioSamples.make(1024, oc.getChannels(), oc.getSampleFormat());
      }
      else if (cType == ICodec.Type.CODEC_TYPE_VIDEO && mHasVideo
          && (vstream == -1 || vstream == i))
      {
        /**
         * If you're reading these comments, this does the same thing as the
         * above branch, only for video. I'm going to assume you read those
         * comments and will only document something substantially different
         * here.
         */
        ICodec codec = null;
        if (vcodec != null)
        {
          codec = ICodec.findEncodingCodecByName(vcodec);
          if (codec == null || codec.getType() != cType)
            throw new RuntimeException("could not find encoder: " + vcodec);
        }
        else
        {
          codec = ICodec.guessEncodingCodec(oFmt, null, outputURL, null,
              cType);
          if (codec == null)
            throw new RuntimeException("could not guess " + cType
                + " encoder for: " + outputURL);

        }
        final IStream os = mOContainer.addNewStream(codec);
        final IStreamCoder oc = os.getStreamCoder();

        mOStreams[i] = os;
        mOCoders[i] = oc;


        // Set options AFTER selecting codec
        final String vpreset = cmdLine.getOptionValue("vpreset");
        if (vpreset != null)
          Configuration.configure(vpreset, oc);
       
        /**
         * In general an IStreamCoder encoding video needs to know: 1) An ICodec
         * to use. 2) The Width and Height of the Video 3) The pixel format
         * (e.g. IPixelFormat.Type#YUV420P) of the video data. Most everything
         * else can be defaulted.
         */
        if (vbitrate == 0)
          vbitrate = ic.getBitRate();
        if (vbitrate == 0)
          vbitrate = 250000;
        oc.setBitRate(vbitrate);
        if (vbitratetolerance > 0)
          oc.setBitRateTolerance(vbitratetolerance);

        int oWidth = ic.getWidth();
        int oHeight = ic.getHeight();

        if (oHeight <= 0 || oWidth <= 0)
          throw new RuntimeException("could not find width or height in url: "
              + inputURL);

        /**
         * For this program we don't allow the user to specify the pixel format
         * type; we force the output to be the same as the input.
         */
        oc.setPixelType(ic.getPixelType());

        if (vscaleFactor != 1.0)
        {
          /**
           * In this case, it looks like the output video requires rescaling, so
            * we create an IVideoResampler to do that dirty work.
           */
          oWidth = (int) (oWidth * vscaleFactor);
          oHeight = (int) (oHeight * vscaleFactor);

          mVSamplers[i] = IVideoResampler
              .make(oWidth, oHeight, oc.getPixelType(), ic.getWidth(), ic
                  .getHeight(), ic.getPixelType());
          if (mVSamplers[i] == null)
          {
            throw new RuntimeException(
                "This version of Xuggler does not support video resampling "
                    + i);
          }
        }
        else
        {
          mVSamplers[i] = null;
        }
        oc.setHeight(oHeight);
        oc.setWidth(oWidth);

        if (vquality >= 0)
        {
          oc.setFlag(IStreamCoder.Flags.FLAG_QSCALE, true);
          oc.setGlobalQuality(vquality);
        }

        /**
         * TimeBases are important, especially for Video. In general Audio
         * encoders will assume that any new audio happens IMMEDIATELY after any
         * prior audio finishes playing. But for video, we need to make sure
         * it's being output at the right rate.
         *
         * In this case we make sure we set the same time base as the input, and
         * then we don't change the time stamps of any IVideoPictures.
         *
         * But take my word that time stamps are tricky, and this only scratches
         * the surface. The good news is, it's easier in Xuggler than some
         * other systems.
         */
        IRational num = null;
        num = ic.getFrameRate();
        oc.setFrameRate(num);
        oc.setTimeBase(IRational.make(num.getDenominator(), num
                .getNumerator()));
        num = null;

        /**
         * And allocate buffers for us to store decoded and resampled video
         * pictures.
         */
        mIVideoPictures[i] = IVideoPicture.make(ic.getPixelType(), ic
            .getWidth(), ic.getHeight());
        mOVideoPictures[i] = IVideoPicture.make(oc.getPixelType(), oc
            .getWidth(), oc.getHeight());
      }
      else
      {
        log.warn("Ignoring input stream {} of type {}", i, cType);
      }
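After this setup pass, every selected input decoder and output encoder still has to be opened, and the output container header written, before the packet loop in the next fragment can run. A hedged sketch of that step using the same mICoders, mOCoders and mOContainer fields (the real class wraps this in its own helper, so treat this as an outline only):

    for (int i = 0; i < mICoders.length; i++)
    {
      if (mOCoders[i] == null)
        continue;                       // this stream was ignored during setup
      if (mICoders[i].open(null, null) < 0)
        throw new RuntimeException("could not open input decoder for stream: " + i);
      if (mOCoders[i].open(null, null) < 0)
        throw new RuntimeException("could not open output encoder for stream: " + i);
    }
    if (mOContainer.writeHeader() < 0)
      throw new RuntimeException("could not write header for: " + outputURL);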

    /**
     * And keep some convenience pointers for the specific stream we're working
     * on for a packet.
     */
    IStreamCoder ic = null;
    IStreamCoder oc = null;
    IAudioResampler as = null;
    IVideoResampler vs = null;
    IVideoPicture inFrame = null;
    IVideoPicture reFrame = null;
    IAudioSamples inSamples = null;
    IAudioSamples outSamples = null;
    IAudioSamples reSamples = null;
    // iPacket, oPacket and retval are assumed to be declared earlier in the method

    /**
     * Now, we've already opened the files in #setupStreams(CommandLine). We
     * just keep reading packets from the input container until readNextPacket
     * returns < 0
     */
    while (mIContainer.readNextPacket(iPacket) == 0)
    {
      /**
       * Find out which stream this packet belongs to.
       */
      int i = iPacket.getStreamIndex();
      int offset = 0;

      /**
       * Find out if this stream has a starting timestamp
       */
      IStream stream = mIContainer.getStream(i);
      long tsOffset = 0;
      if (stream.getStartTime() != Global.NO_PTS && stream.getStartTime() > 0
          && stream.getTimeBase() != null)
      {
        IRational defTimeBase = IRational.make(1,
            (int) Global.DEFAULT_PTS_PER_SECOND);
        tsOffset = defTimeBase.rescale(stream.getStartTime(), stream
            .getTimeBase());
      }
      /**
       * And look up the appropriate objects that are working on that stream.
       */
      ic = mICoders[i];
      oc = mOCoders[i];
      as = mASamplers[i];
      vs = mVSamplers[i];
      inFrame = mIVideoPictures[i];
      reFrame = mOVideoPictures[i];
      inSamples = mISamples[i];
      reSamples = mOSamples[i];

      if (oc == null)
        // we didn't set up this coder; ignore the packet
        continue;

      /**
       * Find out if the stream is audio or video.
       */
      ICodec.Type cType = ic.getCodecType();

      if (cType == ICodec.Type.CODEC_TYPE_AUDIO && mHasAudio)
      {
        /**
         * Decoding audio works by taking the data in the packet, and eating
         * chunks from it to create decoded raw data.
         *
         * However, there may be more data in a packet than is needed to get one
         * set of samples (or less), so you need to iterate through the bytes to
         * get that data.
         *
         * The following loop is the standard way of doing that.
         */
        while (offset < iPacket.getSize())
        {
          retval = ic.decodeAudio(inSamples, iPacket, offset);
          if (retval <= 0)
            throw new RuntimeException("could not decode audio.  stream: " + i);

          if (inSamples.getTimeStamp() != Global.NO_PTS)
            inSamples.setTimeStamp(inSamples.getTimeStamp() - tsOffset);

          log.trace("packet:{}; samples:{}; offset:{}", new Object[]
          {
              iPacket, inSamples, tsOffset
          });

          /**
           * If not an error, decodeAudio returns the number of bytes it
           * consumed. We use that so the next time around the loop we get new
           * data.
           */
          offset += retval;
          int numSamplesConsumed = 0;
          /**
           * If as is not null then we know a resample was needed, so we do that
           * resample now.
           */
          if (as != null && inSamples.getNumSamples() > 0)
          {
            retval = as.resample(reSamples, inSamples, inSamples
                .getNumSamples());

            outSamples = reSamples;
          }
          else
          {
            outSamples = inSamples;
          }

          /**
           * Call a hook in derived classes to allow them to alter the
           * audio frame.
           */

          outSamples = alterAudioFrame(outSamples);

          /**
           * Now that we've resampled, it's time to encode the audio.
           *
           * This workflow is similar to decoding; you may have more, less or
           * just enough audio samples available to encode a packet. But you
           * must iterate through.
           *
           * Unfortunately (don't ask why) there is a slight difference between
           * encodeAudio and decodeAudio; encodeAudio returns the number of
           * samples consumed, NOT the number of bytes. This can be confusing,
           * and we encourage you to read the IAudioSamples documentation to
           * find out what the difference is.
           *
           * But in any case, the following loop encodes the samples we have
           * into packets.
           */
          while (numSamplesConsumed < outSamples.getNumSamples())
          {
            retval = oc.encodeAudio(oPacket, outSamples, numSamplesConsumed);
            if (retval <= 0)
              throw new RuntimeException("Could not encode any audio: "
                  + retval);
            /**
             * Increment the number of samples consumed, so that the next time
             * through this loop we encode new audio
             */
            numSamplesConsumed += retval;
            log.trace("out packet:{}; samples:{}; offset:{}", new Object[]{
                oPacket, outSamples, tsOffset
            });

            writePacket(oPacket);
          }
        }

      }
      else if (cType == ICodec.Type.CODEC_TYPE_VIDEO && mHasVideo)
      {
        /**
         * This encoding workflow is pretty much the same as for the audio
         * above.
         *
         * The only major delta is that encodeVideo() will always consume one
         * frame (whereas encodeAudio() might only consume some samples in an
         * IAudioSamples buffer); it might not be able to output a packet yet,
         * but you can assume that it consumes the entire frame.
         */
        IVideoPicture outFrame = null;
        while (offset < iPacket.getSize())
        {
          retval = ic.decodeVideo(inFrame, iPacket, offset);
          if (retval <= 0)
            throw new RuntimeException("could not decode any video.  stream: "
                + i);

          log.trace("decoded vid ts: {}; pkts ts: {}", inFrame.getTimeStamp(),
              iPacket.getTimeStamp());
          if (inFrame.getTimeStamp() != Global.NO_PTS)
            inFrame.setTimeStamp(inFrame.getTimeStamp() - tsOffset);

          offset += retval;
          if (inFrame.isComplete())
          {
            if (vs != null)
            {
              retval = vs.resample(reFrame, inFrame);
              if (retval < 0)
                throw new RuntimeException("could not resample video");
              outFrame = reFrame;
            }
            else
            {
              outFrame = inFrame;
            }

            /**
             * Call a hook in derived classes to allow them to alter
             * the video frame.
             */

            outFrame = alterVideoFrame(outFrame);

            outFrame.setQuality(0);
            retval = oc.encodeVideo(oPacket, outFrame, 0);
            if (retval < 0)
              throw new RuntimeException("could not encode video");
            writePacket(oPacket);
          }
        }
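When readNextPacket finally signals end of input, the encoders may still be holding buffered data. The usual shutdown is to flush each output coder by encoding with a null picture or null samples argument, write any packets that come out, and then write the container trailer. A hedged sketch of that sequence (a production flush would keep calling the encoder until it stops producing complete packets):

    for (int i = 0; i < mOCoders.length; i++)
    {
      IStreamCoder flushCoder = mOCoders[i];
      if (flushCoder == null)
        continue;
      if (flushCoder.getCodecType() == ICodec.Type.CODEC_TYPE_AUDIO)
        flushCoder.encodeAudio(oPacket, null, 0);   // null samples flushes the encoder
      else if (flushCoder.getCodecType() == ICodec.Type.CODEC_TYPE_VIDEO)
        flushCoder.encodeVideo(oPacket, null, 0);   // null picture flushes the encoder
      if (oPacket.isComplete())
        writePacket(oPacket);
    }
    mOContainer.writeTrailer();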

    // if not found create one

    if (videoConverter == null)
    {
      IStream stream = mStreams.get(streamIndex);
      IStreamCoder coder = stream.getStreamCoder();
      videoConverter = ConverterFactory.createConverter(
        ConverterFactory.findDescriptor(image),
        coder.getPixelType(),
        coder.getWidth(), coder.getHeight(),
        image.getWidth(), image.getHeight());
      mVideoConverters.put(streamIndex, videoConverter);
    }

    // return the converter
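The cached IConverter bridges Xuggler's IVideoPicture and java.awt.image.BufferedImage in both directions. A minimal usage sketch (timeStamp and decodedPicture are assumed names for a presentation timestamp in microseconds and a complete decoded picture of the matching pixel type):

    // image -> picture, e.g. when encoding a BufferedImage into the stream
    IVideoPicture picture = videoConverter.toPicture(image, timeStamp);

    // picture -> image, e.g. when displaying or saving a decoded frame
    BufferedImage frame = videoConverter.toImage(decodedPicture);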

    IStream stream = mStreams.get(getOutputStreamIndex(inputStreamIndex));
    if (null == stream)
      throw new RuntimeException("invalid input stream index (no stream): "
         + inputStreamIndex);
    IStreamCoder coder = stream.getStreamCoder();
    if (null == coder)
      throw new RuntimeException("invalid input stream index (no coder): "
        + inputStreamIndex);
   
    // return the coder

  private boolean addStreamFromContainer(int inputStreamIndex)
  {
    // get the input stream

    IStream inputStream = mInputContainer.getStream(inputStreamIndex);
    IStreamCoder inputCoder = inputStream.getStreamCoder();
    ICodec.Type inputType = inputCoder.getCodecType();
    ICodec.ID inputID = inputCoder.getCodecID();
   
    // if this stream is not a supported type, indicate failure

    if (!isSupportedCodecType(inputType))
      return false;

    IContainerFormat format = getContainer().getContainerFormat();
   
    switch(inputType)
    {
      case CODEC_TYPE_AUDIO:
        addAudioStream(inputStream.getIndex(),
            inputStream.getId(),
            format.establishOutputCodecId(inputID),
            inputCoder.getChannels(),
            inputCoder.getSampleRate());
        break;
      case CODEC_TYPE_VIDEO:
        addVideoStream(inputStream.getIndex(),
            inputStream.getId(),
            format.establishOutputCodecId(inputID),
            inputCoder.getFrameRate(),
            inputCoder.getWidth(),
            inputCoder.getHeight());
        break;
      default:
        break;
    }
    return true;
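The addAudioStream and addVideoStream helpers called above belong to the surrounding class, so their bodies are not shown here; in essence they add a new stream to the output container and configure its IStreamCoder from the values passed in. A hedged sketch of roughly what the video variant has to do with the coder (an illustration of the IStreamCoder calls involved, not the class's actual implementation; the YUV420P pixel type is an assumed default):

  private void addVideoStreamSketch(ICodec.ID codecId, IRational frameRate,
      int width, int height)
  {
    IStream stream = getContainer().addNewStream(ICodec.findEncodingCodec(codecId));
    IStreamCoder coder = stream.getStreamCoder();
    coder.setFrameRate(frameRate);
    // the time base is conventionally the inverse of the frame rate
    coder.setTimeBase(IRational.make(frameRate.getDenominator(),
        frameRate.getNumerator()));
    coder.setWidth(width);
    coder.setHeight(height);
    coder.setPixelType(IPixelFormat.Type.YUV420P);
  }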
