   /*
    * @(#)ScreenRecorder.java 
    * 
    * Copyright (c) 2011-2012 Werner Randelshofer, Goldau, Switzerland.
    * All rights reserved.
    * 
    * You may not use, copy or modify this file, except in compliance with the
    * license agreement you entered into with Werner Randelshofer.
    * For details see accompanying license terms.
   */
  package org.monte.screenrecorder;
  
  import java.awt.AWTEvent;
  import java.awt.Point;
  import java.awt.Robot;
  import java.awt.Toolkit;
  import java.io.File;
  import static java.lang.Math.*;
  import java.util.Date;
  import java.util.List;
  import static org.monte.media.AudioFormatKeys.*;
  import static org.monte.media.BufferFlag.*;
  import static org.monte.media.FormatKeys.EncodingKey;
  import static org.monte.media.FormatKeys.FrameRateKey;
  import static org.monte.media.FormatKeys.MIME_QUICKTIME;
  import static org.monte.media.FormatKeys.MediaTypeKey;
  import static org.monte.media.FormatKeys.MimeTypeKey;
  import static org.monte.media.VideoFormatKeys.*;
A screen recorder written in pure Java.

Captures the screen, the mouse cursor and audio.

This class records mouse clicks occurring on other Java windows running in the same JVM. Mouse clicks occurring in other JVMs and other processes are not recorded. This ability is useful for performing in-JVM recordings of an application that is being tested.

This recorder uses four threads: three capture threads for the screen, the mouse cursor and the audio, and one output thread for the movie writer.

FIXME - This class is a horrible mess.

Author(s):
Werner Randelshofer
Version:
$Id: ScreenRecorder.java 303 2013-01-03 07:43:37Z werner $
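
A minimal usage sketch (not part of this file): construct the recorder with a file format, a screen format, a mouse-cursor format and an optional audio format, then drive it with start() and stop(). The AVI-specific keys used below (MIME_AVI, ENCODING_AVI_TECHSMITH_SCREEN_CAPTURE, QualityKey, KeyFrameIntervalKey) are assumed to be available from the same static imports as above; stop() is defined further down in this class, outside the snippet shown here.

    import java.awt.GraphicsConfiguration;
    import java.awt.GraphicsEnvironment;
    import org.monte.media.Format;
    import org.monte.media.math.Rational;
    import org.monte.screenrecorder.ScreenRecorder;
    import static org.monte.media.FormatKeys.*;
    import static org.monte.media.VideoFormatKeys.*;

    public class RecorderDemo {
        public static void main(String[] args) throws Exception {
            GraphicsConfiguration gc = GraphicsEnvironment
                    .getLocalGraphicsEnvironment()
                    .getDefaultScreenDevice()
                    .getDefaultConfiguration();

            // 15 fps TechSmith-encoded video in an AVI container,
            // a black mouse cursor sampled at 30 Hz, and no audio.
            ScreenRecorder recorder = new ScreenRecorder(gc,
                    new Format(MediaTypeKey, MediaType.FILE, MimeTypeKey, MIME_AVI),
                    new Format(MediaTypeKey, MediaType.VIDEO,
                            EncodingKey, ENCODING_AVI_TECHSMITH_SCREEN_CAPTURE,
                            CompressorNameKey, ENCODING_AVI_TECHSMITH_SCREEN_CAPTURE,
                            DepthKey, 24, FrameRateKey, Rational.valueOf(15),
                            QualityKey, 1.0f, KeyFrameIntervalKey, 15 * 60),
                    new Format(MediaTypeKey, MediaType.VIDEO,
                            EncodingKey, ScreenRecorder.ENCODING_BLACK_CURSOR,
                            FrameRateKey, Rational.valueOf(30)),
                    null);

            recorder.start();
            Thread.sleep(10_000);   // record for ten seconds
            recorder.stop();
            System.out.println("Recorded: " + recorder.getCreatedMovieFiles());
        }
    }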
  
  public class ScreenRecorder extends AbstractStateModel {
  
      public enum State {
  
          DONE, FAILED, RECORDING
      }
      private State state = State.DONE;
      private String stateMessage = null;
    
"Encoding" for black mouse cursor.
 
     public final static String ENCODING_BLACK_CURSOR = "black";
    
"Encoding" for white mouse cursor.
 
     public final static String ENCODING_WHITE_CURSOR = "white";
    
"Encoding" for yellow mouse cursor.
 
     public final static String ENCODING_YELLOW_CURSOR = "yellow";
    
The file format. "AVI" or "QuickTime"
 
     private Format fileFormat;
    
The input video format for cursor capture. "black" or "white".
 
     protected Format mouseFormat;
    
The input video format for screen capture.
 
     private Format screenFormat;
    
The input and output format for audio capture.
 
     private Format audioFormat;
    
The bounds of the graphics device that we capture with AWT Robot.
 
     private Rectangle captureArea;
    
The writer for the movie file.
 
     private MovieWriter w;
    
The start time of the recording.
 
     protected long recordingStartTime;
    
The stop time of the recording.
 
     protected volatile long recordingStopTime;
    
The start time of the current movie file.
 
     private long fileStartTime;
    
Holds the mouse captures made with MouseInfo.
 
     private ArrayBlockingQueue<Buffer> mouseCaptures;
    
Timer for screen captures.
 
     private ScheduledThreadPoolExecutor screenCaptureTimer;
    
Timer for mouse captures.
 
     protected ScheduledThreadPoolExecutor mouseCaptureTimer;
    
Thread for audio capture.
 
     private ScheduledThreadPoolExecutor audioCaptureTimer;
    
Thread for file writing.
 
     private volatile Thread writerThread;
    
Mouse cursor.
 
     private BufferedImage cursorImg;
     private BufferedImage cursorImgPressed;
    
Hot spot of the mouse cursor in cursorImg.
 
     private Point cursorOffset;
    
Object for thread synchronization.
 
     private final Object sync = new Object();
     private ArrayBlockingQueue<Buffer> writerQueue;
    
This codec encodes a video frame.
 
     private Codec frameEncoder;
    
outputTime and ffrDuration are needed for conversion of the video stream from variable frame rate to fixed frame rate. FIXME - Do this with a CodecChain.
 
     private Rational outputTime;
     private Rational ffrDuration;
     private ArrayList<File> recordedFiles;
    
Id of the video track.
 
     protected int videoTrack = 0;
    
Id of the audio track.
 
     protected int audioTrack = 1;
    
The device from which screen captures are generated.
 
     private GraphicsDevice captureDevice;
     private AudioGrabber audioGrabber;
     private ScreenGrabber screenGrabber;
     protected MouseGrabber mouseGrabber;
     private AWTEventListener awtEventListener;
     private ScheduledFuture audioFuture;
     private ScheduledFuture screenFuture;
     protected ScheduledFuture mouseFuture;
    
Where to store the movie.
 
     protected File movieFolder;
     private long maxRecordingTime = 60 * 60 * 1000;
     private long maxFileSize = Long.MAX_VALUE;
    
Audio mixer used for audio input. Set to null for default audio input.
 
     private Mixer mixer;

    
Creates a screen recorder.

Parameters:
cfg Graphics configuration of the capture screen.
 
     public ScreenRecorder(GraphicsConfiguration cfg) throws IOException, AWTException {
         this(cfg, null,
                 // the file format
                 new Format(MediaTypeKey, MediaType.FILE,
                 MimeTypeKey, MIME_QUICKTIME),
                 //
                 // the output format for screen capture
                 new Format(MediaTypeKey, MediaType.VIDEO,
                 EncodingKey, ENCODING_QUICKTIME_ANIMATION,
                 CompressorNameKey, COMPRESSOR_NAME_QUICKTIME_ANIMATION,
                 DepthKey, 24, FrameRateKey, new Rational(15, 1)),
                 //
                 // the output format for mouse capture 
                 new Format(MediaTypeKey, MediaType.VIDEO,
                 EncodingKey, ENCODING_BLACK_CURSOR,
                 FrameRateKey, new Rational(30, 1)),
                 //
                 // the output format for audio capture 
                 new Format(MediaTypeKey, MediaType.AUDIO,
                 EncodingKey, ENCODING_QUICKTIME_TWOS_PCM,
                 SampleRateKey, new Rational(48000, 1),
                 SampleSizeInBitsKey, 16,
                 ChannelsKey, 2, FrameRateKey, new Rational(48000, 1),
                 SignedKey, true, ByteOrderKey, ByteOrder.BIG_ENDIAN));
     }

    
Creates a screen recorder.

Parameters:
cfg Graphics configuration of the capture screen.
fileFormat The file format "AVI" or "QuickTime".
screenFormat The video format for screen capture.
mouseFormat The video format for mouse capture. The EncodingKey must be ENCODING_BLACK_CURSOR or ENCODING_WHITE_CURSOR. The SampleRateKey can be independent from the screenFormat. Specify null if you don't want to capture the mouse cursor.
audioFormat The audio format for audio capture. Specify null if you don't want audio capture.
 
     public ScreenRecorder(GraphicsConfiguration cfg,
             Format fileFormat,
             Format screenFormat,
             Format mouseFormat,
             Format audioFormat) throws IOException, AWTException {
         this(cfg, null, fileFormat, screenFormat, mouseFormat, audioFormat);
     }
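
For reference, a sketch of a custom format set for this constructor, reusing the gc GraphicsConfiguration from the example above; the audio keys (SampleRateKey, SampleSizeInBitsKey, ChannelsKey) are assumed to come from the AudioFormatKeys static import, and the concrete values are illustrative only.

    // QuickTime container, 24-bit screen video at 15 fps,
    // white cursor sampled at 30 Hz, 44100 Hz 16-bit stereo audio.
    Format fileFormat = new Format(MediaTypeKey, MediaType.FILE, MimeTypeKey, MIME_QUICKTIME);
    Format screenFormat = new Format(MediaTypeKey, MediaType.VIDEO,
            DepthKey, 24, FrameRateKey, new Rational(15, 1));
    Format mouseFormat = new Format(MediaTypeKey, MediaType.VIDEO,
            EncodingKey, ScreenRecorder.ENCODING_WHITE_CURSOR,
            FrameRateKey, new Rational(30, 1));
    Format audioFormat = new Format(MediaTypeKey, MediaType.AUDIO,
            SampleRateKey, new Rational(44100, 1),
            SampleSizeInBitsKey, 16, ChannelsKey, 2);
    ScreenRecorder recorder = new ScreenRecorder(gc, fileFormat, screenFormat, mouseFormat, audioFormat);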

    
Creates a screen recorder.

Parameters:
cfg Graphics configuration of the capture screen.
captureArea Defines the area of the screen that shall be captured.
fileFormat The file format "AVI" or "QuickTime".
screenFormat The video format for screen capture.
mouseFormat The video format for mouse capture. The EncodingKey must be ENCODING_BLACK_CURSOR or ENCODING_WHITE_CURSOR. The SampleRateKey can be independent from the screenFormat. Specify null if you don't want to capture the mouse cursor.
audioFormat The audio format for audio capture. Specify null if you don't want audio capture.
 
     public ScreenRecorder(GraphicsConfiguration cfg,
             Rectangle captureArea,
             Format fileFormat,
             Format screenFormat,
             Format mouseFormat,
             Format audioFormat) throws IOException, AWTException {
         this(cfg, captureArea, fileFormat, screenFormat, mouseFormat, audioFormat, null);
     }

    
Creates a screen recorder.

Parameters:
cfg Graphics configuration of the capture screen.
captureArea Defines the area of the screen that shall be captured.
fileFormat The file format "AVI" or "QuickTime".
screenFormat The video format for screen capture.
mouseFormat The video format for mouse capture. The EncodingKey must be ENCODING_BLACK_CURSOR or ENCODING_WHITE_CURSOR. The SampleRateKey can be independent from the screenFormat. Specify null if you don't want to capture the mouse cursor.
audioFormat The audio format for audio capture. Specify null if you don't want audio capture.
movieFolder Where to store the movie
 
     public ScreenRecorder(GraphicsConfiguration cfg,
             Rectangle captureArea,
             Format fileFormat,
             Format screenFormat,
             Format mouseFormat,
             Format audioFormat,
             File movieFolder) throws IOException, AWTException {
 
         this.fileFormat = fileFormat;
         this.screenFormat = screenFormat;
         this.mouseFormat = mouseFormat;
         if (this.mouseFormat == null) {
             this.mouseFormat = new Format(FrameRateKey, new Rational(0, 0), EncodingKey, ENCODING_BLACK_CURSOR);
         }
         this.audioFormat = audioFormat;
         this.recordedFiles = new ArrayList<File>();
         this.captureDevice = cfg.getDevice();
         this.captureArea = (captureArea == null) ? cfg.getBounds() : captureArea;
         if (mouseFormat != null && mouseFormat.get(FrameRateKey).intValue() > 0) {
             mouseCaptures = new ArrayBlockingQueue<Buffer>(mouseFormat.get(FrameRateKey).intValue() * 2);
             if (this.mouseFormat.get(EncodingKey).equals(ENCODING_BLACK_CURSOR)) {
                 cursorImg = Images.toBufferedImage(Images.createImage(ScreenRecorder.class, "images/Cursor.black.png"));
                 cursorImgPressed = Images.toBufferedImage(Images.createImage(ScreenRecorder.class, "images/Cursor.black.pressed.png"));
             } else if (this.mouseFormat.get(EncodingKey).equals(ENCODING_YELLOW_CURSOR)) {
                 cursorImg = Images.toBufferedImage(Images.createImage(ScreenRecorder.class, "images/Cursor.yellow.png"));
                 cursorImgPressed = Images.toBufferedImage(Images.createImage(ScreenRecorder.class, "images/Cursor.yellow.pressed.png"));
             } else {
                 cursorImg = Images.toBufferedImage(Images.createImage(ScreenRecorder.class, "images/Cursor.white.png"));
                 cursorImgPressed = Images.toBufferedImage(Images.createImage(ScreenRecorder.class, "images/Cursor.white.pressed.png"));
             }
             cursorOffset = new Point(cursorImg.getWidth() / -2, cursorImg.getHeight() / -2);
         }
         this.movieFolder = movieFolder;
         if (this.movieFolder == null) {
             this.movieFolder = new File(System.getProperty("java.io.tmpdir") + File.separator + "screenrecorder");
         }
 
     }
 
     protected MovieWriter createMovieWriter() throws IOException {
         File f = createMovieFile(fileFormat);
         recordedFiles.add(f);
 
         MovieWriter mw = w = Registry.getInstance().getWriter(f);
 
         // Create the video encoder
         Rational videoRate = Rational.max(screenFormat.get(FrameRateKey), mouseFormat.get(FrameRateKey));
         ffrDuration = videoRate.inverse();
         Format videoInputFormat = screenFormat.prepend(MediaTypeKey, MediaType.VIDEO,
                 EncodingKey, ENCODING_BUFFERED_IMAGE,
                 WidthKey, captureArea.width,
                 HeightKey, captureArea.height,
                 FrameRateKey, videoRate);
         Format videoOutputFormat = screenFormat.prepend(
                 FrameRateKey, videoRate,
                 MimeTypeKey, fileFormat.get(MimeTypeKey))
                 //
                 .append(
                 WidthKey, captureArea.width,
                 HeightKey, captureArea.height);
 
         videoTrack = w.addTrack(videoOutputFormat);
         if (audioFormat != null) {
             audioTrack = w.addTrack(audioFormat);
         }
 
         Codec encoder = Registry.getInstance().getEncoder(w.getFormat(videoTrack));
         if (encoder == null) {
             throw new IOException("No encoder for format " + w.getFormat(videoTrack));
         }
         frameEncoder = encoder;
         frameEncoder.setInputFormat(videoInputFormat);
         frameEncoder.setOutputFormat(videoOutputFormat);
         if (frameEncoder.getOutputFormat() == null) {
             throw new IOException("Unable to encode video frames in this output format:\n" + videoOutputFormat);
         }
 
         // If the capture area does not have the same dimensions as the
         // video format, create a codec chain which scales the image before
         // performing the frame encoding.
         if (!videoInputFormat.intersectKeys(WidthKey, HeightKey).matches(
                 videoOutputFormat.intersectKeys(WidthKey, HeightKey))) {
             ScaleImageCodec sic = new ScaleImageCodec();
             sic.setInputFormat(videoInputFormat);
             sic.setOutputFormat(videoOutputFormat.intersectKeys(WidthKey, HeightKey).append(videoInputFormat));
             frameEncoder = new CodecChain(sic, frameEncoder);
         }
 
         // FIXME - There should be no need for format-specific code.
         if (screenFormat.get(DepthKey) == 8) {
             if (w instanceof AVIWriter) {
                 AVIWriter aviw = (AVIWriter) w;
                 aviw.setPalette(videoTrack, Colors.createMacColors());
             } else if (w instanceof QuickTimeWriter) {
                 QuickTimeWriter qtw = (QuickTimeWriter) w;
                 qtw.setVideoColorTable(videoTrack, Colors.createMacColors());
             }
         }
 
         fileStartTime = System.currentTimeMillis();
         return mw;
     }

    
Returns a list of all files that the screen recorder created.
 
     public List<File> getCreatedMovieFiles() {
         return Collections.unmodifiableList(recordedFiles);
     }

    
Creates a file for recording the movie.

This implementation creates a file in the user's "Videos" folder on Windows, or in the user's "Movies" folder on Mac OS X.

You can override this method, if you would like to create a movie file at a different location.

Parameters:
fileFormat
Returns:
the file
Throws:
java.io.IOException
 
     protected File createMovieFile(Format fileFormat) throws IOException {
         if (!movieFolder.exists()) {
             movieFolder.mkdirs();
         } else if (!movieFolder.isDirectory()) {
             throw new IOException("\"" + movieFolder + "\" is not a directory.");
         }
 
         SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd 'at' HH.mm.ss");
 
         File f = new File(movieFolder,
                 "ScreenRecording " + dateFormat.format(new Date()) + "." + Registry.getInstance().getExtension(fileFormat));
         return f;
     }
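
Since createMovieFile(Format) is protected, a subclass can redirect recordings to another location; below is a sketch under the assumption that only the target directory and naming scheme change (the MyScreenRecorder name, the outputDir field and the "capture-" prefix are illustrative, not part of the library).

    class MyScreenRecorder extends ScreenRecorder {
        private final File outputDir;

        MyScreenRecorder(GraphicsConfiguration cfg, File outputDir) throws IOException, AWTException {
            super(cfg);
            this.outputDir = outputDir;
        }

        @Override
        protected File createMovieFile(Format fileFormat) throws IOException {
            if (!outputDir.isDirectory() && !outputDir.mkdirs()) {
                throw new IOException("Cannot create " + outputDir);
            }
            String ext = Registry.getInstance().getExtension(fileFormat);
            return new File(outputDir, "capture-" + System.currentTimeMillis() + "." + ext);
        }
    }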

    
Returns the state of the recorder.
 
     public State getState() {
         return state;
     }

    
Returns the state message of the recorder.
 
     public String getStateMessage() {
         return stateMessage;
     }

    
Sets the state of the recorder and fires a ChangeEvent.
 
     private void setState(State newValue, String msg) {
         state = newValue;
         stateMessage = msg;
         fireStateChanged();
     }
 
     public long getStartTime() {
         return recordingStartTime;
     }

    
Starts the screen recorder.
 
     public void start() throws IOException {
         stop();
         recordedFiles.clear();
         createMovieWriter();
         try {
             recordingStartTime = System.currentTimeMillis();
             recordingStopTime = Long.MAX_VALUE;
 
             outputTime = new Rational(0, 0);
             startWriter();
             try {
                 startScreenCapture();
             } catch (AWTException e) {
                 IOException ioe = new IOException("Start screen capture failed");
                 ioe.initCause(e);
                 stop();
                 throw ioe;
             } catch (IOException ioe) {
                 stop();
                 throw ioe;
             }
             if (mouseFormat != null && mouseFormat.get(FrameRateKey).intValue() > 0) {
                 startMouseCapture();
             }
             if (audioFormat != null) {
                 try {
                     startAudioCapture();
                 } catch (LineUnavailableException e) {
                     IOException ioe = new IOException("Start audio capture failed");
                     ioe.initCause(e);
                     stop();
                     throw ioe;
                 }
             }
             setState(State.RECORDING, null);
         } catch (IOException e) {
             stop();
             throw e;
         }
     }

    
Starts screen capture.
 
     private void startScreenCapture() throws AWTExceptionIOException {
         screenCaptureTimer = new ScheduledThreadPoolExecutor(1);
         int delay = max(1, (int) (1000 / screenFormat.get(FrameRateKey).doubleValue()));
         screenGrabber = new ScreenGrabber(this, recordingStartTime);
         screenFuture = screenCaptureTimer.scheduleAtFixedRate(screenGrabber, delay, delay, TimeUnit.MILLISECONDS);
         screenGrabber.setFuture(screenFuture);
     }
 
     private static class ScreenGrabber implements Runnable {

        
Previously drawn mouse location. This is used to have the last mouse location at hand when a new screen capture has been created but the mouse has not been moved.
 
         private Point prevDrawnMouseLocation = new Point(Integer.MAX_VALUE, Integer.MAX_VALUE);
         private boolean prevMousePressed = false;
        
Holds the screen capture made with AWT Robot.
 
         private BufferedImage screenCapture;
         private ScreenRecorder recorder;
         private ScheduledThreadPoolExecutor screenTimer;
        
The AWT Robot which we use for capturing the screen.
 
         private Robot robot;
         private Rectangle captureArea;
        
Holds the composed image (screen capture and super-imposed mouse cursor). This is the image that is written into the video track of the file.
 
         private BufferedImage videoImg;
        
Graphics object for drawing into videoImg.
 
         private Graphics2D videoGraphics;
         private final Format mouseFormat;
        
Holds the mouse captures made with MouseInfo.
 
         private ArrayBlockingQueue<Buffer> mouseCaptures;
        
The time the previous screen frame was captured.
 
         private Rational prevScreenCaptureTime;
         private final Object sync;
         private BufferedImage cursorImg, cursorImgPressed;
         private Point cursorOffset;
         private int videoTrack;
         private long startTime;
         private volatile long stopTime = Long.MAX_VALUE;
         private ScheduledFuture future;
         private long sequenceNumber;
 
         public void setFuture(ScheduledFuture future) {
             this.future = future;
         }
 
         public synchronized void setStopTime(long newValue) {
             this.stopTime = newValue;
         }
 
         public synchronized long getStopTime() {
             return this.stopTime;
         }
 
         public ScreenGrabber(ScreenRecorder recorder, long startTime) throws AWTException, IOException {
             this.recorder = recorder;
             this.screenTimer = recorder.screenCaptureTimer;
             this.captureArea = recorder.captureArea;
             this.robot = new Robot(recorder.captureDevice);
             this.mouseFormat = recorder.mouseFormat;
             this.mouseCaptures = recorder.mouseCaptures;
             this.sync = recorder.sync;
             this.cursorImg = recorder.cursorImg;
             this.cursorImgPressed = recorder.cursorImgPressed;
             this.cursorOffset = recorder.cursorOffset;
             this.videoTrack = recorder.videoTrack;
             this.prevScreenCaptureTime = new Rational(startTime, 1000);
             this.startTime = startTime;
 
             Format screenFormat = recorder.screenFormat;
             if (screenFormat.get(DepthKey, 24) == 24) {
                 videoImg = new BufferedImage(this.captureArea.width, this.captureArea.height, BufferedImage.TYPE_INT_RGB);
             } else if (screenFormat.get(DepthKey) == 16) {
                 videoImg = new BufferedImage(this.captureArea.width, this.captureArea.height, BufferedImage.TYPE_USHORT_555_RGB);
             } else if (screenFormat.get(DepthKey) == 8) {
                 videoImg = new BufferedImage(this.captureArea.width, this.captureArea.height, BufferedImage.TYPE_BYTE_INDEXED, Colors.createMacColors());
             } else {
                 throw new IOException("Unsupported color depth " + screenFormat.get(DepthKey));
             }
             videoGraphics = videoImg.createGraphics();
         }
 
         @Override
         public void run() {
             try {
                 grabScreen();
             } catch (Throwable ex) {
                 ex.printStackTrace();
                 screenTimer.shutdown();
                 recorder.recordingFailed(ex.getMessage());
             }
         }

        
Grabs a screen, generates video images with pending mouse captures and writes them into the movie file.
 
         private void grabScreen() throws IOException, InterruptedException {
             // Capture the screen
             BufferedImage previousScreenCapture = screenCapture;
             long timeBeforeCapture = System.currentTimeMillis();
             try {
                 screenCapture = robot.createScreenCapture(captureArea);
             } catch (IllegalMonitorStateException e) {
                 // Screen capture failed due to a synchronization error
                 return;
             }
             long timeAfterCapture = System.currentTimeMillis();
             if (previousScreenCapture == null) {
                 previousScreenCapture = screenCapture;
             }
             videoGraphics.drawImage(previousScreenCapture, 0, 0, null);
 
             Buffer buf = new Buffer();
             buf.format = new Format(MediaTypeKey, MediaType.VIDEO);
             // Generate video frames with mouse cursor painted on them
             boolean hasMouseCapture = false;
             if (mouseFormat != null && mouseFormat.get(FrameRateKey).intValue() > 0) {
                 while (!mouseCaptures.isEmpty() && mouseCaptures.peek().timeStamp.compareTo(new Rational(timeAfterCapture, 1000)) < 0) {
                     Buffer mouseCapture = mouseCaptures.poll();
                     if (mouseCapture.timeStamp.compareTo(prevScreenCaptureTime) > 0) {
                         if (mouseCapture.timeStamp.compareTo(new Rational(timeBeforeCapture, 1000)) < 0) {
                             previousScreenCapture = screenCapture;
                             videoGraphics.drawImage(previousScreenCapture, 0, 0, null);
                         }
 
                         Point mcp = (Point) mouseCapture.data;
                         prevMousePressed = (Boolean) mouseCapture.header;
                         prevDrawnMouseLocation.setLocation(mcp.x - captureArea.x, mcp.y - captureArea.y);
                         Point p = prevDrawnMouseLocation;
 
                         long localStopTime = getStopTime();
                         if (mouseCapture.timeStamp.compareTo(new Rational(localStopTime, 1000)) > 0) {
                             break;
                         }
                         {
                             hasMouseCapture = true;
 
                             // draw cursor
                             if (prevMousePressed) {
                                 videoGraphics.drawImage(cursorImgPressed, p.x + cursorOffset.x, p.y + cursorOffset.y, null);
                             } else {
                                 videoGraphics.drawImage(cursorImg, p.x + cursorOffset.x, p.y + cursorOffset.y, null);
                             }
                             buf.clearFlags();
                             buf.data = videoImg;
                             buf.sampleDuration = mouseCapture.timeStamp.subtract(prevScreenCaptureTime);
                             buf.timeStamp = prevScreenCaptureTime.subtract(new Rational(startTime, 1000));
                             buf.track = videoTrack;
                             buf.sequenceNumber = sequenceNumber++;
 
                             // Fudge mouse position into the header
                             buf.header = p.x == Integer.MAX_VALUE ? null : p;
                             recorder.write(buf);
                             prevScreenCaptureTime = mouseCapture.timeStamp;
 
                             // erase cursor
                             videoGraphics.drawImage(previousScreenCapture,
                                     p.x + cursorOffset.x, p.y + cursorOffset.y,
                                     p.x + cursorOffset.x + cursorImg.getWidth() - 1, p.y + cursorOffset.y + cursorImg.getHeight() - 1,
                                     p.x + cursorOffset.x, p.y + cursorOffset.y,
                                     p.x + cursorOffset.x + cursorImg.getWidth() - 1, p.y + cursorOffset.y + cursorImg.getHeight() - 1,
                                     null);
                         }
 
                     }
                 }
 
                 if (!hasMouseCapture && prevScreenCaptureTime.compareTo(new Rational(getStopTime(), 1000)) < 0) {
                     Point p = prevDrawnMouseLocation;
                     if (p != null) {
                         if (prevMousePressed) {
                             videoGraphics.drawImage(cursorImgPressed, p.x + cursorOffset.x, p.y + cursorOffset.y, null);
                         } else {
                             videoGraphics.drawImage(cursorImg, p.x + cursorOffset.x, p.y + cursorOffset.y, null);
                         }
                     }
 
                     buf.data = videoImg;
                     buf.sampleDuration = new Rational(timeAfterCapture, 1000).subtract(prevScreenCaptureTime);
                     buf.timeStamp = prevScreenCaptureTime.subtract(new Rational(startTime, 1000));
                     buf.track = videoTrack;
                     buf.sequenceNumber = sequenceNumber++;
                     buf.header = p.x == Integer.MAX_VALUE ? null : p;
                     recorder.write(buf);
                     prevScreenCaptureTime = new Rational(timeAfterCapture, 1000);
                     if (p != null) { // erase cursor
                         videoGraphics.drawImage(previousScreenCapture,
                                 p.x + cursorOffset.x, p.y + cursorOffset.y,
                                 p.x + cursorOffset.x + cursorImg.getWidth() - 1, p.y + cursorOffset.y + cursorImg.getHeight() - 1,
                                 p.x + cursorOffset.x, p.y + cursorOffset.y,
                                 p.x + cursorOffset.x + cursorImg.getWidth() - 1, p.y + cursorOffset.y + cursorImg.getHeight() - 1,
                                 null);
                     }
                 }
             } else if (prevScreenCaptureTime.compareTo(new Rational(getStopTime(), 1000)) < 0) {
                 buf.data = videoImg;
                 buf.sampleDuration = new Rational(timeAfterCapture, 1000).subtract(prevScreenCaptureTime);
                 buf.timeStamp = prevScreenCaptureTime.subtract(new Rational(startTime, 1000));
                 buf.track = videoTrack;
                 buf.sequenceNumber = sequenceNumber++;
                 buf.header = null; // no mouse position has been recorded for this frame
                 recorder.write(buf);
                 prevScreenCaptureTime = new Rational(timeAfterCapture, 1000);
             }
 
             if (timeBeforeCapture > getStopTime()) {
                 future.cancel(false);
             }
         }
 
         public void close() {
             videoGraphics.dispose();
             videoImg.flush();
         }
     }

    
Starts mouse capture.
 
     protected void startMouseCapture() throws IOException {
         mouseCaptureTimer = new ScheduledThreadPoolExecutor(1);
         int delay = max(1, (int) (1000 / mouseFormat.get(FrameRateKey).doubleValue()));
         mouseGrabber = new MouseGrabber(this, recordingStartTime, mouseCaptureTimer);
         mouseFuture = mouseCaptureTimer.scheduleAtFixedRate(mouseGrabber, delay, delay, TimeUnit.MILLISECONDS);
         mouseGrabber.setFuture(mouseFuture);
         final MouseGrabber mouseGrabberF = mouseGrabber;
         awtEventListener = new AWTEventListener() {
             @Override
             public void eventDispatched(AWTEvent event) {
                 if (event.getID() == MouseEvent.MOUSE_PRESSED) {
                     mouseGrabberF.setMousePressed(true);
                 } else if (event.getID() == MouseEvent.MOUSE_RELEASED) {
                     mouseGrabberF.setMousePressed(false);
                 }
             }
         };
         Toolkit.getDefaultToolkit().addAWTEventListener(awtEventListener, AWTEvent.MOUSE_EVENT_MASK);
     }

    
Stops mouse capturing. Use waitUntilMouseCaptureStopped() to wait until capturing has stopped.
 
     protected void stopMouseCapture() {
         if (mouseCaptureTimer != null) {
             mouseGrabber.setStopTime(recordingStopTime);
         }
         if (awtEventListener != null) {
             Toolkit.getDefaultToolkit().removeAWTEventListener(awtEventListener);
             awtEventListener = null;
         }
     }

    
Waits until mouse capturing has stopped. Invoke this method only after you have invoked stopMouseCapture().
 
     protected void waitUntilMouseCaptureStopped() throws InterruptedException {
         if (mouseCaptureTimer != null) {
             try {
                 mouseFuture.get();
             } catch (InterruptedException ex) {
             } catch (CancellationException ex) {
             } catch (ExecutionException ex) {
             }
             mouseCaptureTimer.shutdown();
             mouseCaptureTimer.awaitTermination(5000, TimeUnit.MILLISECONDS);
             mouseCaptureTimer = null;
             mouseGrabber.close();
             mouseGrabber = null;
         }
     }
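
The intended call order for shutting down the mouse capture path is the two-step sequence below; a sketch of how it might appear in stop-style code of a subclass (recordingStopTime is assumed to have been set beforehand).

    // 1. Ask the mouse grabber to stop and detach the AWT event listener.
    stopMouseCapture();
    // 2. Block until the grabber task has actually terminated.
    try {
        waitUntilMouseCaptureStopped();
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt(); // preserve the interrupt status
    }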
 
     protected static class MouseGrabber implements Runnable {

        
Previously captured mouse location. This is used to coalesce mouse captures if the mouse has not been moved.
 
         private Point prevCapturedMouseLocation = new Point(Integer.MAX_VALUE, Integer.MAX_VALUE);
         private ScheduledThreadPoolExecutor timer;
         private ScreenRecorder recorder;
         private GraphicsDevice captureDevice;
         private Rectangle captureArea;
         private BlockingQueue<Buffer> mouseCaptures;
         private volatile long stopTime = Long.MAX_VALUE;
         private long startTime;
         private Format format;
         private ScheduledFuture future;
         private volatile boolean mousePressed;
         private volatile boolean mouseWasPressed;
         private volatile boolean mousePressedRecorded;
 
         public MouseGrabber(ScreenRecorder recorder, long startTime, ScheduledThreadPoolExecutor timer) {
             this.recorder = recorder;
             this.timer = timer;
             this.format = recorder.mouseFormat;
             this.captureDevice = recorder.captureDevice;
             this.captureArea = recorder.captureArea;
             this.mouseCaptures = recorder.mouseCaptures;
             this.startTime = startTime;
         }
 
         public void setFuture(ScheduledFuture future) {
             this.future = future;
         }
 
         public void setMousePressed(boolean newValue) {
             if (newValue) {
                 mouseWasPressed = true;
             }
             mousePressed = newValue;
         }
 
         @Override
         public void run() {
             try {
                 grabMouse();
             } catch (Throwable ex) {
                 ex.printStackTrace();
                 timer.shutdown();
                 recorder.recordingFailed(ex.getMessage());
             }
         }
 
         public synchronized void setStopTime(long newValue) {
             this.stopTime = newValue;
         }
 
         public synchronized long getStopTime() {
             return this.stopTime;
         }

        
Captures the mouse cursor.
 
         private void grabMouse() throws InterruptedException {
             long now = System.currentTimeMillis();
             if (now > getStopTime()) {
                 future.cancel(false);
                 return;
             }
             PointerInfo info = MouseInfo.getPointerInfo();
             Point p = info.getLocation();
             if (!info.getDevice().equals(captureDevice)
                     || !captureArea.contains(p)) {
                 // If the cursor is outside the capture region, we
                 // assign Integer.MAX_VALUE to its location.
                 // This ensures that all mouse movements outside of the
                 // capture region get coalesced.
                 p.setLocation(Integer.MAX_VALUE, Integer.MAX_VALUE);
             }
 
             // Only create a new capture event if the location has changed
             if (!p.equals(prevCapturedMouseLocation) || mouseWasPressed != mousePressedRecorded) {
                 Buffer buf = new Buffer();
                 buf.format = format;
                 buf.timeStamp = new Rational(now, 1000);
                 buf.data = p;
                 buf.header = mouseWasPressed;
                 mousePressedRecorded = mouseWasPressed;
                 mouseCaptures.put(buf);
                 prevCapturedMouseLocation.setLocation(p);
             }
             if (!mousePressed) {
                 mouseWasPressed = false;
             }
         }
 
         public void close() {
         }
     }

    
Starts audio capture.
 
     private void startAudioCapture() throws LineUnavailableException {
         audioCaptureTimer = new ScheduledThreadPoolExecutor(1);
         int delay = 500;
         audioGrabber = new AudioGrabber(mixer, audioFormat, audioTrack, recordingStartTime, writerQueue);
         audioFuture = audioCaptureTimer.scheduleWithFixedDelay(audioGrabber, 0, delay, TimeUnit.MILLISECONDS);
         audioGrabber.setFuture(audioFuture);
     }

    
Returns the audio level of the left channel or of the mono channel.

Returns:
A value in the range [0.0,1.0] or AudioSystem.NOT_SPECIFIED.
 
     public float getAudioLevelLeft() {
         AudioGrabber ag = audioGrabber;
         if (ag != null) {
             return ag.getAudioLevelLeft();
         }
         return AudioSystem.NOT_SPECIFIED;
     }

    
Returns the audio level of the right channel.

Returns:
A value in the range [0.0,1.0] or AudioSystem.NOT_SPECIFIED.
 
     public float getAudioLevelRight() {
         AudioGrabber ag = audioGrabber;
         if (ag != null) {
             return ag.getAudioLevelRight();
         }
         return AudioSystem.NOT_SPECIFIED;
     }
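
Because these getters only read the current grabber state, a level meter can simply poll them while the recorder is running. A sketch using a Swing timer; the recorder and levelBar variables and the 100 ms period are illustrative, not part of this class.

    javax.swing.Timer meter = new javax.swing.Timer(100, e -> {
        float left = recorder.getAudioLevelLeft();   // in [0,1] or AudioSystem.NOT_SPECIFIED
        if (left != AudioSystem.NOT_SPECIFIED) {
            levelBar.setValue((int) (left * 100));   // levelBar: a JProgressBar with range 0..100
        }
    });
    meter.start();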

    
This runnable grabs audio samples and enqueues them into the specified BlockingQueue. This runnable must be called twice a second.
 
     private static class AudioGrabber implements Runnable {
 
         final private TargetDataLine line;
         final private BlockingQueue<Buffer> queue;
         final private Format audioFormat;
         final private int audioTrack;
         final private long startTime;
         private volatile long stopTime = Long.MAX_VALUE;
         private long totalSampleCount;
         private ScheduledFuture future;
         private long sequenceNumber;
         private float audioLevelLeft = AudioSystem.NOT_SPECIFIED;
         private float audioLevelRight = AudioSystem.NOT_SPECIFIED;
         private Mixer mixer;
 
         public AudioGrabber(Mixer mixer, Format audioFormat, int audioTrack, long startTime, BlockingQueue<Buffer> queue)
                 throws LineUnavailableException {
             this.mixer = mixer;
             this.audioFormat = audioFormat;
             this.audioTrack = audioTrack;
             this.queue = queue;
             this.startTime = startTime;
             DataLine.Info lineInfo = new DataLine.Info(
                     TargetDataLine.class, AudioFormatKeys.toAudioFormat(audioFormat));
 
             if (mixer != null) {
                 line = (TargetDataLine) mixer.getLine(lineInfo);
             } else {
                 line = (TargetDataLine) AudioSystem.getLine(lineInfo);
             }
 
             // Make sure the line is not muted
             try {
                 BooleanControl ctrl = (BooleanControl) line.getControl(BooleanControl.Type.MUTE);
                 ctrl.setValue(false);
             } catch (IllegalArgumentException e) {
                 // We can't unmute the line from Java
             }
             // Make sure the volume of the line is bigger than 0.2
             try {
                 FloatControl ctrl = (FloatControl) line.getControl(FloatControl.Type.VOLUME);
                 ctrl.setValue(Math.max(ctrl.getValue(), 0.2f));
             } catch (IllegalArgumentException e) {
                 // We can't change the volume from Java
             }
             line.open();
             line.start();
        }
         public void setFuture(ScheduledFuture future) {
             this.future = future;
         }
 
         public void close() {
             line.close();
         }
 
         public synchronized void setStopTime(long newValue) {
             this.stopTime = newValue;
         }
 
         public synchronized long getStopTime() {
             return this.stopTime;
         }
        @Override
        public void run() {
             Buffer buf = new Buffer();
             AudioFormat lineFormat = line.getFormat();
             buf.format = fromAudioFormat(lineFormat).append(SignedKey, true);
             // For even sample rates, we select a buffer size that can
             // hold half a second of audio. This allows audio/video interleave
             // twice a second, as recommended for AVI and QuickTime movies.
             // For odd sample rates, we have to select a buffer size that can hold
             // one second of audio.
             int bufferSize = lineFormat.getFrameSize() * (int) lineFormat.getSampleRate();
             if (((int) lineFormat.getSampleRate() & 1) == 0) {
                 bufferSize /= 2;
             }
             byte bdat[] = new byte[bufferSize];
             buf.data = bdat;
             Rational sampleRate = Rational.valueOf(lineFormat.getSampleRate());
             Rational frameRate = Rational.valueOf(lineFormat.getFrameRate());
             int count = line.read(bdat, 0, bdat.length);
             if (count > 0) {
                 computeAudioLevel(bdat, count, lineFormat);
                 buf.sampleCount = count / (lineFormat.getSampleSizeInBits() / 8 * lineFormat.getChannels());
                 buf.sampleDuration = sampleRate.inverse();
                 buf.offset = 0;
                 buf.sequenceNumber = sequenceNumber++;
                 buf.length = count;
                 buf.track = audioTrack;
                 buf.timeStamp = new Rational(totalSampleCount, 1).divide(frameRate);
                 // Check if recording should be stopped
                 Rational stopTS = new Rational(getStopTime() - startTime, 1000);
                 if (buf.timeStamp.add(buf.sampleDuration.multiply(buf.sampleCount)).compareTo(stopTS) > 0) {
                     // we recorded too much => truncate the buffer
                     buf.sampleCount = Math.max(0, (int) Math.ceil(stopTS.subtract(buf.timeStamp).divide(buf.sampleDuration).floatValue()));
                     buf.length = buf.sampleCount * (lineFormat.getSampleSizeInBits() / 8 * lineFormat.getChannels());
                     future.cancel(false);
                 }
                 if (buf.sampleCount > 0) {
                     try {
                         queue.put(buf);
                     } catch (InterruptedException ex) {
                         // nothing to do
                     }
                 }
                 totalSampleCount += buf.sampleCount;
             }
        }

        
Calculates the root-mean-square average of continuous samples. For four samples, the formula looks like this:
 rms = sqrt( (x0^2 + x1^2 + x2^2 + x3^2) / 4)
 
Resources: http://www.jsresources.org/faq_audio.html#calculate_power

Parameters:
data
length
format
        private void computeAudioLevel(byte[] dataint lengthAudioFormat format) {
             if (format.getEncoding().equals(AudioFormat.Encoding.PCM_SIGNED)) {
                switch (format.getSampleSizeInBits()) {
                    case 8:
                        switch (format.getChannels()) {
                            case 1:
                                 audioLevelLeft = computeAudioLevelSigned8(data, 0, length, format.getFrameSize());
                                break;
                            case 2:
                                 audioLevelLeft = computeAudioLevelSigned8(data, 0, length, format.getFrameSize());
                                 audioLevelRight = computeAudioLevelSigned8(data, 1, length, format.getFrameSize());
                                break;
                        }
                        break;
                    case 16:
                        if (format.isBigEndian()) {
                            switch (format.getChannels()) {
                                case 1:
                                     audioLevelLeft = computeAudioLevelSigned16BE(data, 0, length, format.getFrameSize());
                                    break;
                                case 2:
                                     audioLevelLeft = computeAudioLevelSigned16BE(data, 0, length, format.getFrameSize());
                                     audioLevelRight = computeAudioLevelSigned16BE(data, 2, length, format.getFrameSize());
                                    break;
                            }
                        } else {
                            switch (format.getChannels()) {
                                case 1:
                                    break;
                                case 2:
                                    break;
                            }
                        }
                        break;
                }
            }
        }
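
As a standalone sketch of the same RMS idea for signed 16-bit big-endian samples (the case handled by computeAudioLevelSigned16BE below), assuming samples are normalized by 2^15 before squaring; the rmsSigned16BE name is illustrative:

        private static float rmsSigned16BE(byte[] data, int offset, int length, int stride) {
            double sum = 0;
            int count = 0;
            for (int i = offset; i + 1 < length; i += stride) {
                int sample = (data[i] << 8) | (data[i + 1] & 0xFF); // big-endian 16-bit sample
                double v = sample / 32768.0;                        // scale to [-1,1)
                sum += v * v;
                count++;
            }
            return count == 0 ? 0f : (float) Math.sqrt(sum / count);
        }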
        private float computeAudioLevelSigned16BE(byte[] data, int offset, int length, int stride) {
            double sum = 0;