/* @(#)AVIWriter.java
 * Copyright (c) 2011 Werner Randelshofer, Goldau, Switzerland.
 * All rights reserved.
 *
 * You may not use, copy or modify this file, except in compliance with the
 * license agreement you entered into with Werner Randelshofer.
 * For details see accompanying license terms.
 */
 
package org.monte.media.avi;

import java.awt.image.*;
import java.io.*;
import java.nio.ByteOrder;
import java.util.*;
import javax.imageio.stream.*;
import org.monte.media.*;
import org.monte.media.io.*;
import org.monte.media.math.*;
import org.monte.media.riff.*;
import static org.monte.media.AudioFormatKeys.*;
import static org.monte.media.VideoFormatKeys.*;
import static org.monte.media.BufferFlag.*;

Provides high-level support for encoding and writing audio and video samples into an AVI 1.0 file.

Author(s):
Werner Randelshofer
Version:
$Id: AVIWriter.java 306 2013-01-04 16:19:29Z werner $
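
In practice the writer is used by opening it on a file, adding a track described by a Format, writing frames, and closing it. The following is a minimal usage sketch, not part of AVIWriter.java; it assumes the format keys statically imported from VideoFormatKeys, the Rational class from org.monte.media.math, and values (file name, size, frame count) chosen only for illustration:

    // Illustrative usage sketch (not part of this file).
    File file = new File("output.avi");
    Format videoFormat = new Format(
            MediaTypeKey, MediaType.VIDEO,
            EncodingKey, ENCODING_AVI_MJPG,     // Motion JPEG
            FrameRateKey, new Rational(30, 1),  // 30 frames per second
            WidthKey, 640, HeightKey, 480,
            DepthKey, 24, QualityKey, 0.9f);
    AVIWriter out = new AVIWriter(file);
    try {
        int track = out.addTrack(videoFormat);
        BufferedImage frame = new BufferedImage(640, 480, BufferedImage.TYPE_INT_RGB);
        for (int i = 0; i < 300; i++) {
            // ... draw frame number i into 'frame' here ...
            out.write(track, frame, 1); // one media time unit per frame
        }
    } finally {
        out.close();
    }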
 
 public class AVIWriter extends AVIOutputStream implements MovieWriter {
 
    public final static Format AVI = new Format(
            MediaTypeKey, MediaType.FILE, MimeTypeKey, MIME_AVI);
    public final static Format VIDEO_RAW = new Format(
            MediaTypeKey, MediaType.VIDEO, MimeTypeKey, MIME_AVI,
            EncodingKey, ENCODING_AVI_DIB);
    public final static Format VIDEO_JPEG = new Format(
            MediaTypeKey, MediaType.VIDEO, MimeTypeKey, MIME_AVI,
            EncodingKey, ENCODING_AVI_MJPG);
    public final static Format VIDEO_PNG = new Format(
            MediaTypeKey, MediaType.VIDEO, MimeTypeKey, MIME_AVI,
            EncodingKey, ENCODING_AVI_PNG);
    public final static Format VIDEO_RLE = new Format(
            MediaTypeKey, MediaType.VIDEO, MimeTypeKey, MIME_AVI,
            EncodingKey, ENCODING_AVI_RLE);
    public final static Format VIDEO_SCREEN_CAPTURE = new Format(
            MediaTypeKey, MediaType.VIDEO, MimeTypeKey, MIME_AVI,
            EncodingKey, ENCODING_AVI_TECHSMITH_SCREEN_CAPTURE);
Creates a new AVI writer.

Parameters:
file the output file
 
    public AVIWriter(File file) throws IOException {
         super(file);
     }

    
Creates a new AVI writer.

Parameters:
out the output stream.
 
    public AVIWriter(ImageOutputStream out) throws IOException {
         super(out);
     }
 
     @Override
     public Format getFileFormat() throws IOException {
        return AVI;
     }
 
     @Override
     public Format getFormat(int track) {
        return tracks.get(track).format;
     }

    
Returns the media duration of the track in seconds.
 
     @Override
     public Rational getDuration(int track) {
        Track tr = tracks.get(track);
        long duration = getMediaDuration(track);
        return new Rational(duration * tr.scale, tr.rate);
     }
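
For example, a video track created from a FrameRateKey of 30/1 gets scale 1 and rate 30, so after writing 90 frames of one media time unit each this method returns 90 * 1 / 30 = 3 seconds.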

    
Adds a track.

Parameters:
format The format of the track.
Returns:
The track number.
    @Override
    public int addTrack(Format format) throws IOException {
        if (format.get(MediaTypeKey) == MediaType.VIDEO) {
            return addVideoTrack(format);
        } else {
            return addAudioTrack(format);
        }
    }

    
Adds a video track.

Parameters:
format The format of the track.
Returns:
The track number.
    private int addVideoTrack(Format vf) throws IOException {
        if (!vf.containsKey(EncodingKey)) {
            throw new IllegalArgumentException("EncodingKey missing in " + vf);
        }
        if (!vf.containsKey(FrameRateKey)) {
            throw new IllegalArgumentException("FrameRateKey missing in " + vf);
        }
        if (!vf.containsKey(WidthKey)) {
            throw new IllegalArgumentException("WidthKey missing in " + vf);
        }
        if (!vf.containsKey(HeightKey)) {
            throw new IllegalArgumentException("HeightKey missing in " + vf);
        }
        if (!vf.containsKey(DepthKey)) {
            throw new IllegalArgumentException("DepthKey missing in " + vf);
        }
        int tr = addVideoTrack(vf.get(EncodingKey),
                vf.get(FrameRateKey).getDenominator(), vf.get(FrameRateKey).getNumerator(),
                vf.get(WidthKey), vf.get(HeightKey), vf.get(DepthKey),
                vf.get(FrameRateKey).floor(1).intValue());
        setCompressionQuality(tr, vf.get(QualityKey, 1.0f));
        return tr;
    }
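
The predefined VIDEO_* formats at the top of the class only carry the media type, MIME type and encoding, so the per-movie keys validated here (width, height, depth, frame rate) still have to be supplied by the caller. A hedged sketch, assuming Format.prepend returns a copy with the given properties added and a hypothetical writer variable holding an AVIWriter:

    // Sketch: completing a predefined format before adding the track.
    Format fmt = AVIWriter.VIDEO_RLE.prepend(
            WidthKey, 320, HeightKey, 240,
            DepthKey, 8,                        // run-length encoding works on indexed images
            FrameRateKey, new Rational(15, 1));
    int track = writer.addTrack(fmt);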

    
Adds an audio track.

Parameters:
format The format of the track.
Returns:
The track number.
    private int addAudioTrack(Format format) throws IOException {
        int waveFormatTag = 0x0001; // WAVE_FORMAT_PCM
        long timeScale = 1;
        long sampleRate = format.get(SampleRateKey, new Rational(41000, 0)).longValue();
        int numberOfChannels = format.get(ChannelsKey, 1);
        int sampleSizeInBits = format.get(SampleSizeInBitsKey, 16);
        boolean isCompressed = false; // FIXME
        int frameDuration = 1;
        int frameSize = format.get(FrameSizeKey, (sampleSizeInBits + 7) / 8 * numberOfChannels);
        String enc = format.get(EncodingKey);
        if (enc == null) {
            waveFormatTag = 0x0001; // WAVE_FORMAT_PCM
        } else if (enc.equals(ENCODING_PCM_SIGNED)) {
            waveFormatTag = 0x0001; // WAVE_FORMAT_PCM
        } else if (enc.equals(ENCODING_PCM_UNSIGNED)) {
            waveFormatTag = 0x0001; // WAVE_FORMAT_PCM
        } else if (enc.equals(ENCODING_QUICKTIME_TWOS_PCM)) {
            waveFormatTag = 0x0001; // WAVE_FORMAT_PCM
        } else if (enc.equals(ENCODING_QUICKTIME_RAW_PCM)) {
            waveFormatTag = 0x0001; // WAVE_FORMAT_PCM
        } else if (enc.equals(ENCODING_ULAW)) {
            waveFormatTag = 0x0001; // WAVE_FORMAT_PCM - FIXME
        } else {
            waveFormatTag = RIFFParser.stringToID(format.get(EncodingKey)) & 0xffff;
        }
        return addAudioTrack(waveFormatTag, //
                timeScale, sampleRate, //
                numberOfChannels, sampleSizeInBits, //
                isCompressed, //
                frameDuration, frameSize);
    }
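
For illustration, an audio Format that falls into the WAVE_FORMAT_PCM branch above might look as follows. This is a sketch using key names from AudioFormatKeys, with a hypothetical writer variable; it reaches this method through the public addTrack because its media type is not VIDEO:

    // Sketch: 44.1 kHz, stereo, 16-bit signed PCM audio track.
    Format audioFormat = new Format(
            MediaTypeKey, MediaType.AUDIO,
            EncodingKey, ENCODING_PCM_SIGNED,
            SampleRateKey, new Rational(44100, 1),
            ChannelsKey, 2,
            SampleSizeInBitsKey, 16);
    int audioTrack = writer.addTrack(audioFormat);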

    
Returns the codec of the specified track.
    public Codec getCodec(int track) {
        return tracks.get(track).codec;
    }

    
Sets the codec for the specified track.
    public void setCodec(int track, Codec codec) {
        tracks.get(track).codec = codec;
    }
    @Override
    public int getTrackCount() {
        return tracks.size();
    }

    
Encodes the provided image and writes its sample data into the specified track.

Parameters:
track The track index.
image The image of the video frame.
duration Duration given in media time units.
Throws:
IndexOutOfBoundsException if the track index is out of bounds.
IllegalArgumentException if the duration is less than 1, or if the dimension of the frame does not match the dimension of the video.
java.lang.UnsupportedOperationException if the MovieWriter does not have a built-in encoder for this video format.
java.io.IOException if writing the sample data failed.
    public void write(int track, BufferedImage image, long duration) throws IOException {
        ensureStarted();
        VideoTrack vt = (VideoTrack) tracks.get(track);
        if (vt.codec == null) {
            createCodec(track);
        }
        if (vt.codec == null) {
            throw new UnsupportedOperationException("No codec for this format: " + vt.format);
        }
        // The dimension of the image must match the dimension of the video track
        Format fmt = vt.format;
        if (fmt.get(WidthKey) != image.getWidth() || fmt.get(HeightKey) != image.getHeight()) {
            throw new IllegalArgumentException("Dimensions of image[" + vt.samples.size()
                    + "] (width=" + image.getWidth() + ", height=" + image.getHeight()
                    + ") differs from video format of track: " + fmt);
        }
        // Encode pixel data
        {
            if (vt.outputBuffer == null) {
                vt.outputBuffer = new Buffer();
            }
            boolean isKeyframe = vt.syncInterval == 0 ? false : vt.samples.size() % vt.syncInterval == 0;
            Buffer inputBuffer = new Buffer();
            inputBuffer.flags = (isKeyframe) ? EnumSet.of(KEYFRAME) : EnumSet.noneOf(BufferFlag.class);
            inputBuffer.data = image;
            vt.codec.process(inputBuffer, vt.outputBuffer);
            if (vt.outputBuffer.flags.contains(DISCARD)) {
                return;
            }
            // Encode palette data
            isKeyframe = vt.outputBuffer.flags.contains(KEYFRAME);
            boolean paletteChange = writePalette(track, image, isKeyframe);
            writeSample(track, (byte[]) vt.outputBuffer.data, vt.outputBuffer.offset, vt.outputBuffer.length, isKeyframe && !paletteChange);
            /*
             long offset = getRelativeStreamPosition();
             DataChunk videoFrameChunk = new DataChunk(vt.getSampleChunkFourCC(isKeyframe));
             moviChunk.add(videoFrameChunk);
             videoFrameChunk.getOutputStream().write((byte[]) vt.outputBuffer.data, vt.outputBuffer.offset, vt.outputBuffer.length);
             videoFrameChunk.finish();
             long length = getRelativeStreamPosition() - offset;
             Sample s=new Sample(videoFrameChunk.chunkType, 1, offset, length, isKeyframe&&!paletteChange);
             vt.addSample(s);
             idx1.add(s);
            
             if (getRelativeStreamPosition() > 1L << 32) {
             throw new IOException("AVI file is larger than 4 GB");
             }*/
        }
    }
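
The duration argument is given in media time units of the track, and addVideoTrack above derives the track time base from FrameRateKey, so a constant-rate movie simply writes every frame with duration 1. A sketch, assuming writer and track as created earlier and a hypothetical renderFrame helper:

    // Sketch: writing 5 seconds of video on a 30 fps track.
    for (int i = 0; i < 150; i++) {
        BufferedImage frame = renderFrame(i); // hypothetical frame source
        writer.write(track, frame, 1);        // 1 media time unit = one frame period
    }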

    
Encodes the data provided in the buffer and then writes it into the specified track.

Does nothing if the discard-flag in the buffer is set to true.

Parameters:
track The track number.
buf The buffer containing a data sample.
    @Override
    public void write(int track, Buffer buf) throws IOException {
        ensureStarted();
        if (buf.flags.contains(DISCARD)) {
            return;
        }
        Track tr = tracks.get(track);
        boolean isKeyframe = buf.flags.contains(KEYFRAME);
        if (buf.data instanceof BufferedImage) {
            if (tr.syncInterval != 0) {
                isKeyframe = buf.flags.contains(KEYFRAME) | (tr.samples.size() % tr.syncInterval == 0);
            }
        }
        // Encode palette data
        boolean paletteChange = false;
        if (buf.data instanceof BufferedImage && tr instanceof VideoTrack) {
            paletteChange = writePalette(track, (BufferedImage) buf.data, isKeyframe);
        } else if (buf.header instanceof IndexColorModel) {
            paletteChange = writePalette(track, (IndexColorModel) buf.header, isKeyframe);
        }
        // Encode sample data
        {
            if (buf.format == null) {
                throw new IllegalArgumentException("Buffer.format must not be null");
            }
            if (buf.format.matchesWithout(tr.format) && buf.data instanceof byte[]) {
                writeSamples(track, buf.sampleCount, (byte[]) buf.data, buf.offset, buf.length,
                        buf.isFlag(KEYFRAME) && !paletteChange);
                return;
            }
            // We got here, because the buffer format does not match the track 
            // format. Lets see if we can create a codec which can perform the
            // encoding for us.
            if (tr.codec == null) {
                createCodec(track);
                if (tr.codec == null) {
                    throw new UnsupportedOperationException("No codec for this format " + tr.format);
                }
            }
            if (tr.outputBuffer == null) {
                tr.outputBuffer = new Buffer();
            }
            Buffer outBuf = tr.outputBuffer;
            if (tr.codec.process(buf, outBuf) != Codec.CODEC_OK) {
                throw new IOException("Codec failed or could not encode the sample in a single step.");
            }
            if (outBuf.isFlag(DISCARD)) {
                return;
            }
            writeSamples(track, outBuf.sampleCount, (byte[]) outBuf.data, outBuf.offset, outBuf.length,
                    isKeyframe && !paletteChange);
        }
    }
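
This write variant can also be handed sample data that is already encoded in the track's format, in which case no codec is invoked. A sketch, assuming the Buffer fields used above, a hypothetical encodedFrame byte array, and writer and track as before:

    // Sketch: handing an already-encoded sample to the writer.
    Buffer buf = new Buffer();
    buf.format = writer.getFormat(track);  // matches the track format, so no transcoding
    buf.data = encodedFrame;               // byte[] holding one encoded video or audio sample
    buf.offset = 0;
    buf.length = encodedFrame.length;
    buf.sampleCount = 1;
    buf.flags = EnumSet.of(KEYFRAME);
    writer.write(track, buf);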
    private boolean writePalette(int track, BufferedImage image, boolean isKeyframe) throws IOException {
        if ((image.getColorModel() instanceof IndexColorModel)) {
            return writePalette(track, (IndexColorModel) image.getColorModel(), isKeyframe);
        }
        return false;
    }
    private boolean writePalette(int track, IndexColorModel imgPalette, boolean isKeyframe) throws IOException {
        ensureStarted();
        VideoTrack vt = (VideoTrack) tracks.get(track);
        int imgDepth = vt.bitCount;
        ByteArrayImageOutputStream tmp = null;
        boolean paletteChange = false;
        switch (imgDepth) {
            case 4: {
                //IndexColorModel imgPalette = (IndexColorModel) image.getColorModel();
                int[] imgRGBs = new int[16];
                imgPalette.getRGBs(imgRGBs);
                int[] previousRGBs = new int[16];
                if (vt.previousPalette == null) {
                    vt.previousPalette = vt.palette;
                }
                vt.previousPalette.getRGBs(previousRGBs);
                if (isKeyframe || !Arrays.equals(imgRGBs, previousRGBs)) {
                    paletteChange = true;
                    vt.previousPalette = imgPalette;
                    /*
                     int first = imgPalette.getMapSize();
                     int last = -1;
                     for (int i = 0; i < 16; i++) {
                     if (previousRGBs[i] != imgRGBs[i] && i < first) {
                     first = i;
                     }
                     if (previousRGBs[i] != imgRGBs[i] && i > last) {
                     last = i;
                     }
                     }*/
                    int first = 0;
                    int last = imgPalette.getMapSize() - 1;
                    /*
                     * typedef struct {
                     BYTE         bFirstEntry;
                     BYTE         bNumEntries;
                     WORD         wFlags;
                     PALETTEENTRY peNew[];
                     } AVIPALCHANGE;
                     *
                     * typedef struct tagPALETTEENTRY {
                     BYTE peRed;
                     BYTE peGreen;
                     BYTE peBlue;
                     BYTE peFlags;
                     } PALETTEENTRY;
                     */
                    tmp = new ByteArrayImageOutputStream(ByteOrder.LITTLE_ENDIAN);
                    tmp.writeByte(first); // bFirstEntry
                    tmp.writeByte(last - first + 1); // bNumEntries
                    tmp.writeShort(0); // wFlags
                    for (int i = first; i <= last; i++) {
                        tmp.writeByte((imgRGBs[i] >>> 16) & 0xff); // red
                        tmp.writeByte((imgRGBs[i] >>> 8) & 0xff); // green
                        tmp.writeByte(imgRGBs[i] & 0xff); // blue
                        tmp.writeByte(0); // reserved
                    }
                }
                break;
            }
            case 8: {
                //IndexColorModel imgPalette = (IndexColorModel) image.getColorModel();
                int[] imgRGBs = new int[256];
                imgPalette.getRGBs(imgRGBs);
                int[] previousRGBs = new int[256];
                if (vt.previousPalette != null) {
                    vt.previousPalette.getRGBs(previousRGBs);
                }
                if (isKeyframe || !Arrays.equals(imgRGBs, previousRGBs)) {
                    paletteChange = true;
                    vt.previousPalette = imgPalette;
                    /*
                     int first = imgPalette.getMapSize();
                     int last = -1;
                     for (int i = 0; i < 16; i++) {
                     if (previousRGBs[i] != imgRGBs[i] && i < first) {
                     first = i;
                     }
                     if (previousRGBs[i] != imgRGBs[i] && i > last) {
                     last = i;
                     }
                     }*/
                    int first = 0;
                    int last = imgPalette.getMapSize() - 1;
                    /*
                     * typedef struct {
                     BYTE         bFirstEntry;
                     BYTE         bNumEntries;
                     WORD         wFlags;
                     PALETTEENTRY peNew[];
                     } AVIPALCHANGE;
                     *
                     * typedef struct tagPALETTEENTRY {
                     BYTE peRed;
                     BYTE peGreen;
                     BYTE peBlue;
                     BYTE peFlags;
                     } PALETTEENTRY;
                     */
                    tmp = new ByteArrayImageOutputStream(ByteOrder.LITTLE_ENDIAN);
                    tmp.writeByte(first); // bFirstEntry
                    tmp.writeByte(last - first + 1); // bNumEntries
                    tmp.writeShort(0); // wFlags
                    for (int i = first; i <= last; i++) {
                        tmp.writeByte((imgRGBs[i] >>> 16) & 0xff); // red
                        tmp.writeByte((imgRGBs[i] >>> 8) & 0xff); // green
                        tmp.writeByte(imgRGBs[i] & 0xff); // blue
                        tmp.writeByte(0); // reserved
                    }
                }
                break;
            }
        }
        if (tmp != null) {
            tmp.close();
            writePalette(track, tmp.toByteArray(), 0, (int) tmp.length(), isKeyframe);
        }
        return paletteChange;
    }
    private Codec createCodec(Format fmt) {
        return Registry.getInstance().getEncoder(fmt.prepend(MimeTypeKey, MIME_AVI));
    }
    private void createCodec(int track) {
        Track tr = .get(track);
        Format fmt = tr.format;
        tr.codec = createCodec(fmt);
        String enc = fmt.get(EncodingKey);
        if (tr.codec != null) {
            if (fmt.get(MediaTypeKey) == MediaType.VIDEO) {
                tr.codec.setInputFormat(fmt.prepend(
                        EncodingKey, ENCODING_BUFFERED_IMAGE,
                        DataClassKey, BufferedImage.class));
                if (null == tr.codec.setOutputFormat(
                        fmt.prepend(FixedFrameRateKey, true,
                        QualityKey, getCompressionQuality(track),
                        MimeTypeKey, MIME_AVI,
                        DataClassKey, byte[].class))) {
                    throw new UnsupportedOperationException("Track " + tr + " codec does not support format " + fmt + ". codec=" + tr.codec);
                }
            } else {
                tr.codec.setInputFormat(null);
                if (null == tr.codec.setOutputFormat(
                        fmt.prepend(FixedFrameRateKey, true,
                        QualityKey, getCompressionQuality(track),
                        MimeTypeKey, MIME_AVI,
                        DataClassKey, byte[].class))) {
                    throw new UnsupportedOperationException("Track " + tr + " codec " + tr.codec + " does not support format. " + fmt);
                }
            }
        }
    }
    public boolean isVFRSupported() {
        return false;
    }
    @Override
    public boolean isEmpty(int track) {
        return tracks.get(track).samples.isEmpty();
    }
}