!7
// AudioLoop from jsresources.org
import javax.sound.sampled.DataLine;
import javax.sound.sampled.SourceDataLine;
import javax.sound.sampled.TargetDataLine;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.LineUnavailableException;
import javax.sound.sampled.Mixer;
import javax.sound.sampled.AudioFileFormat;
lib 1009286 // getopt
import gnu.getopt.Getopt;
static boolean DEBUG;
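// Default buffer sizes in bytes. The "internal" size is requested for the
// Java Sound lines themselves; the "external" size is the byte array that
// the copy loop reads into and writes from.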
static final int DEFAULT_INTERNAL_BUFSIZ = 40960;
static final int DEFAULT_EXTERNAL_BUFSIZ = 40960;
p {
String strMixerName = null;
float fFrameRate = 44100.0F;
int nInternalBufferSize = DEFAULT_INTERNAL_BUFSIZ;
int nExternalBufferSize = DEFAULT_EXTERNAL_BUFSIZ;
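// Option string: h, l and D take no argument; r, i, e and M (trailing ':')
// each expect one.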
Getopt g = new Getopt("AudioLoop", args, "hlr:i:e:M:D");
int c;
while ((c = g.getopt()) != -1)
{
switch (c)
{
case 'h':
printUsage();
ret;
case 'l':
javaSound_listMixers();
ret;
case 'r':
fFrameRate = Float.parseFloat(g.getOptarg());
if (DEBUG) { out("AudioLoop.main(): frame rate: " + fFrameRate); }
break;
case 'i':
nInternalBufferSize = Integer.parseInt(g.getOptarg());
if (DEBUG) { out("AudioLoop.main(): internal buffer size: " + nInternalBufferSize); }
break;
case 'e':
nExternalBufferSize = Integer.parseInt(g.getOptarg());
if (DEBUG) { out("AudioLoop.main(): external buffer size: " + nExternalBufferSize); }
break;
case 'M':
strMixerName = g.getOptarg();
if (DEBUG) { out("AudioLoop.main(): mixer name: " + strMixerName); }
break;
case 'D':
DEBUG = true;
break;
case '?':
printUsage();
ret;
default:
out("AudioLoop.main(): getopt() returned: " + c);
break;
}
}
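// Fixed capture/playback format: signed 16-bit PCM, stereo, 4 bytes per
// frame, little-endian. Only the frame rate is configurable (-r).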
AudioFormat audioFormat = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, fFrameRate, 16, 2, 4, fFrameRate, false);
if (DEBUG) { out("AudioLoop.main(): audio format: " + audioFormat); }
AudioLoop audioLoop = null;
try
{
audioLoop = new AudioLoop(audioFormat,
nInternalBufferSize,
nExternalBufferSize,
strMixerName);
}
catch (LineUnavailableException e)
{
e.printStackTrace();
System.exit(1);
}
audioLoop.start();
}
// TODO: params for audio quality, optionally use compression and decompression in the loop (see ~/AudioLoop.java)
/** AudioLoop
Recording and playing back the recorded data immediately.

Purpose:
This program opens two lines: one for recording and one
for playback. In an infinite loop, it reads data from
the recording line and writes it to the playback line.
You can use this to measure the delays inside Java Sound:
speak into the microphone and wait until you hear
yourself in the speakers. It can also be used to
experience the effect of changing the buffer sizes: use
the -e and -i options and you will notice that the
delays change, too.

Usage:
java AudioLoop -h
java AudioLoop -l
java AudioLoop [-D] [-M <mixername>] [-e <buffersize>] [-i <buffersize>]

Parameters:
-l  lists the available mixers
-M <mixername>  selects a mixer to play on
-e <buffersize>  the buffer size to use in the application ("extern")
-i <buffersize>  the buffer size to use in Java Sound ("intern")

Bugs, limitations:
There is no way to stop the program besides brute force
(ctrl-C). There is no way to set the audio quality.
The example requires that the soundcard and its driver
as well as the Java Sound implementation support full-duplex
operation. On Linux, either use Tritonus or enable
full-duplex in Sun's Java Sound implementation (search the
archive of java-linux).

Source code: AudioLoop.java, AudioCommon.java, gnu.getopt.Getopt
*/
sclass AudioLoop extends Thread {
private TargetDataLine m_targetLine;
private SourceDataLine m_sourceLine;
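// run() loops while this flag is true; nothing in the program ever clears
// it (see "Bugs, limitations" above).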
private boolean m_bRecording;
private int m_nExternalBufferSize;
/*
* We have to pass an AudioFormat to describe in which
* format the audio data should be recorded and played.
*/
public AudioLoop(AudioFormat format,
int nInternalBufferSize,
int nExternalBufferSize,
String strMixerName)
throws LineUnavailableException
{
Mixer mixer = null;
if (strMixerName != null)
{
Mixer.Info mixerInfo = javaSound_getMixerInfo(strMixerName);
if (DEBUG) { out("AudioLoop.(): mixer info: " + mixerInfo); }
mixer = AudioSystem.getMixer(mixerInfo);
if (DEBUG) { out("AudioLoop.(): mixer: " + mixer); }
}
/*
* We retrieve and open the recording and the playback line.
*/
DataLine.Info targetInfo = new DataLine.Info(TargetDataLine.class, format, nInternalBufferSize);
DataLine.Info sourceInfo = new DataLine.Info(SourceDataLine.class, format, nInternalBufferSize);
if (mixer != null)
{
m_targetLine = (TargetDataLine) mixer.getLine(targetInfo);
m_sourceLine = (SourceDataLine) mixer.getLine(sourceInfo);
}
else
{
m_targetLine = (TargetDataLine) AudioSystem.getLine(targetInfo);
m_sourceLine = (SourceDataLine) AudioSystem.getLine(sourceInfo);
}
if (DEBUG) { out("AudioLoop.(): SourceDataLine: " + m_sourceLine); }
if (DEBUG) { out("AudioLoop.(): TargetDataLine: " + m_targetLine); }
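// The buffer size passed to open() is only a request; the line may pick a
// different actual size (it can be queried with getBufferSize()).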
m_targetLine.open(format, nInternalBufferSize);
m_sourceLine.open(format, nInternalBufferSize);
m_nExternalBufferSize = nExternalBufferSize;
}
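// Start both lines before starting the copy thread, so the thread has data
// to read and somewhere to write it from the first iteration on.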
public void start() {
m_targetLine.start();
m_sourceLine.start();
super.start();
}
public void run() {
byte[] abBuffer = new byte[m_nExternalBufferSize];
int nBufferSize = abBuffer.length;
m_bRecording = true;
while (m_bRecording) {
if (DEBUG) { out("Trying to read: " + nBufferSize); }
/*
* read a block of data from the recording line.
*/
int nBytesRead = m_targetLine.read(abBuffer, 0, nBufferSize);
if (DEBUG) { out("Read: " + nBytesRead); }
/*
* And now, we write the block to the playback
* line.
*/
m_sourceLine.write(abBuffer, 0, nBytesRead);
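// write() blocks while the playback line's buffer is full, so the loop is
// paced by the audio hardware rather than spinning.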
}
}
}
svoid out(S s) { print(s); }
static void printUsage() {
out("AudioLoop: usage:");
out("\tjava AudioLoop -h");
out("\tjava AudioLoop -l");
out("\tjava AudioLoop [-D] [-M ] [-e ] [-i ]");
}