Gives access to the actual data read from the CaptureDevice during audio mixing, in order to make it available to the sound level indicator functionality.

cusax-fix
Lyubomir Marinov 16 years ago
parent e5676f0ee3
commit 670e6a34ee

@ -269,6 +269,25 @@ void connect()
}
}
/**
 * Factory hook that builds the <tt>InputStreamDesc</tt> pairing an input
 * <tt>SourceStream</tt> with the <tt>InputDataSourceDesc</tt> of the input
 * <tt>DataSource</tt> it originates from.
 *
 * @param inputStream the input <tt>SourceStream</tt> the returned descriptor
 * is to wrap
 * @param inputDataSourceDesc the descriptor of the input <tt>DataSource</tt>
 * from which <tt>inputStream</tt> originates
 * @return an <tt>InputStreamDesc</tt> associating <tt>inputStream</tt> with
 * <tt>inputDataSourceDesc</tt>
 */
private InputStreamDesc createInputStreamDesc(
        SourceStream inputStream,
        InputDataSourceDesc inputDataSourceDesc)
{
    return new InputStreamDesc(inputStream, inputDataSourceDesc);
}
/**
* Creates a new <tt>AudioMixingPushBufferDataSource</tt> which gives
* access to a single audio stream representing the mix of the audio streams
@ -612,7 +631,7 @@ && matches(inputFormat, outputFormat))
if (inputStreamDesc == null)
inputStreamDesc
= new InputStreamDesc(
= createInputStreamDesc(
inputStream,
inputDataSourceDesc);
if (inputStreams.add(inputStreamDesc))
@ -638,7 +657,7 @@ && matches(inputFormat, outputFormat))
if (inputStreamDesc == null)
inputStreamDesc
= new InputStreamDesc(
= createInputStreamDesc(
new PushBufferStreamAdapter(
inputStream,
inputFormat),
@ -659,7 +678,7 @@ && matches(inputFormat, outputFormat))
if (inputStreamDesc == null)
inputStreamDesc
= new InputStreamDesc(
= createInputStreamDesc(
new PullBufferStreamAdapter(
inputStream,
inputFormat),
@ -889,6 +908,29 @@ private boolean matches(Format input, AudioFormat pattern)
((input instanceof AudioFormat) && input.isSameEncoding(pattern));
}
/**
 * Performs the actual read of media from a <tt>PushBufferStream</tt> known to
 * belong to the <tt>CaptureDevice</tt> of this <tt>AudioMixer</tt>. Kept as a
 * separate, overridable method so that extenders may observe or customize the
 * capture-device data (e.g. for sound level indication) before it is mixed.
 *
 * @param stream the capture-device <tt>PushBufferStream</tt> to read from
 * @param buffer the <tt>Buffer</tt> into which the media read from
 * <tt>stream</tt> is written for returning to the caller
 * @throws IOException if reading from <tt>stream</tt> fails
 */
protected void readCaptureDeviceStream(
        PushBufferStream stream,
        Buffer buffer)
    throws IOException
{
    stream.read(buffer);
}
/**
* Reads an integer from a specific series of bytes starting the reading at
* a specific offset in it.
@ -1304,6 +1346,15 @@ public void read(Buffer buffer)
buffer.setData(inputSampleDesc);
buffer.setLength(maxInputSampleCount);
/*
* Convey the timeStamp so that it can be reported by the Buffers of
* the AudioMixingPushBufferStreams when mixes are read from them.
*/
long timeStamp = inputSampleDesc.getTimeStamp();
if (timeStamp != Buffer.TIME_UNKNOWN)
buffer.setTimeStamp(timeStamp);
}
/**
@ -1320,17 +1371,21 @@ public void read(Buffer buffer)
* @param sampleCount the maximum number of samples which the read
* operation should attempt to read from <tt>inputStream</tt> but the
* very <tt>inputStream</tt> may not honor the request
* @return an array of audio samples read from the specified
* <tt>inputStream</tt>
* @param captureDevice <tt>true</tt> if the specified
* <tt>inputStream</tt> is one of the streams of the
* <tt>CaptureDevice</tt> of this <tt>AudioMixer</tt>
* @return a <tt>Buffer</tt> which contains the array of <tt>int</tt>
* audio samples read from the specified <tt>inputStream</tt>
* @throws IOException if anything wrong happens while reading
* <tt>inputStream</tt>
* @throws UnsupportedFormatException if converting the samples read
* from <tt>inputStream</tt> to <tt>outputFormat</tt> fails
*/
private int[] read(
private Buffer read(
PushBufferStream inputStream,
AudioFormat outputFormat,
int sampleCount)
int sampleCount,
boolean captureDevice)
throws IOException,
UnsupportedFormatException
{
@ -1358,9 +1413,19 @@ private int[] read(
"!Format.getDataType().equals(byte[].class)",
inputStreamFormat);
}
inputStream.read(buffer);
if (captureDevice)
readCaptureDeviceStream(inputStream, buffer);
else
inputStream.read(buffer);
/*
* If the media is to be discarded, don't even bother with the
* checks and the conversion.
*/
if (buffer.isDiscard())
return null;
int inputLength = buffer.getLength();
if (inputLength <= 0)
@ -1372,8 +1437,8 @@ private int[] read(
inputFormat = inputStreamFormat;
if (logger.isTraceEnabled()
&& ((lastReadInputFormat == null)
|| !lastReadInputFormat.matches(inputFormat)))
&& (lastReadInputFormat != null)
&& !lastReadInputFormat.matches(inputFormat))
{
lastReadInputFormat = inputFormat;
logger
@ -1466,7 +1531,11 @@ private int[] read(
outputSamples[i] = sample;
}
return outputSamples;
buffer.setData(outputSamples);
buffer.setFormat(outputFormat);
buffer.setLength(outputSamples.length);
buffer.setOffset(0);
return buffer;
case 32:
outputSamples = new int[inputSamples.length / 4];
for (int i = 0; i < outputSamples.length; i++)
@ -1491,7 +1560,11 @@ private int[] read(
outputSamples[i] = sample;
}
return outputSamples;
buffer.setData(outputSamples);
buffer.setFormat(outputFormat);
buffer.setLength(outputSamples.length);
buffer.setOffset(0);
return buffer;
case 8:
case 24:
default:
@ -1588,11 +1661,17 @@ private int readInputPushBufferStreams(
if (inputStream instanceof PushBufferStream)
{
int[] inputStreamSamples
Buffer inputStreamBuffer
= read(
(PushBufferStream) inputStream,
outputFormat,
maxInputSampleCount);
maxInputSampleCount,
inputStreamDesc.getInputDataSource()
== captureDevice);
int[] inputStreamSamples
= (inputStreamBuffer == null)
? null
: (int[]) inputStreamBuffer.getData();
int inputStreamSampleCount;
if (inputStreamSamples != null)
@ -1604,6 +1683,19 @@ private int readInputPushBufferStreams(
if (maxInputSampleCount < inputStreamSampleCount)
maxInputSampleCount = inputStreamSampleCount;
/*
* Convey the timeStamp so that it can be set to the
* Buffers of the AudioMixingPushBufferStreams when
* mixes are read from them. Since the inputStreams
* will report different timeStamps, only use the
* first meaningful timestamp for now.
*/
if (inputSampleDesc.getTimeStamp()
== Buffer.TIME_UNKNOWN)
inputSampleDesc
.setTimeStamp(
inputStreamBuffer.getTimeStamp());
}
else if (logger.isTraceEnabled())
inputStreamDesc.nonContributingReadCount++;
@ -1692,7 +1784,11 @@ private void setInputSamples(
inputSamples[i] = null;
}
outputStream.setInputSamples(inputSamples, maxInputSampleCount);
outputStream
.setInputSamples(
inputSamples,
maxInputSampleCount,
inputSampleDesc.getTimeStamp());
}
/**
@ -2022,6 +2118,13 @@ private static class InputSampleDesc
*/
public final InputStreamDesc[] inputStreams;
/**
* The time stamp of <tt>inputSamples</tt> to be reported in the
* <tt>Buffer</tt>s of the <tt>AudioMixingPushBufferStream</tt>s when
* mixes are read from them.
*/
private long timeStamp = Buffer.TIME_UNKNOWN;
/**
* Initializes a new <tt>InputSampleDesc</tt> instance which is to
* describe a specific set of audio samples read from a specific set of
@ -2039,6 +2142,45 @@ public InputSampleDesc(
this.inputSamples = inputSamples;
this.inputStreams = inputStreams;
}
/**
 * Returns the time stamp associated with {@link #inputSamples}, i.e. the
 * value to be reported in the <tt>Buffer</tt>s of the
 * <tt>AudioMixingPushBufferStream</tt>s when mixes are read from them.
 *
 * @return the time stamp of <tt>inputSamples</tt>; <tt>Buffer.TIME_UNKNOWN</tt>
 * if no meaningful time stamp has been set yet
 */
public long getTimeStamp()
{
    return timeStamp;
}
/**
 * Sets the time stamp of <tt>inputSamples</tt> to be reported in the
 * <tt>Buffer</tt>s of the <tt>AudioMixingPushBufferStream</tt>s when mixes
 * are read from them. May only be invoked while the time stamp is still
 * <tt>Buffer.TIME_UNKNOWN</tt>.
 *
 * @param timeStamp the time stamp of <tt>inputSamples</tt> to be reported
 * in the <tt>Buffer</tt>s of the <tt>AudioMixingPushBufferStream</tt>s when
 * mixes are read from them
 * @throws IllegalStateException if the time stamp has already been set,
 * because the inputStreams report different timeStamps and only one is to be
 * picked up where the very reading from the inputStreams takes place
 */
public void setTimeStamp(long timeStamp)
{
    /*
     * Setting the timeStamp more than once does not make sense because the
     * inputStreams will report different timeStamps so only one should be
     * picked up where the very reading from inputStreams takes place.
     */
    if (this.timeStamp != Buffer.TIME_UNKNOWN)
        throw new IllegalStateException(
                "The timeStamp of this InputSampleDesc has already been"
                    + " set.");
    this.timeStamp = timeStamp;
}
}
/**
@ -2092,6 +2234,22 @@ public InputStreamDesc(
this.inputDataSourceDesc = inputDataSourceDesc;
}
/**
 * Returns the input <tt>DataSource</tt> which caused {@link #inputStream} to
 * exist. When the input <tt>DataSource</tt> is not transcoded for the
 * purposes of the audio mixing, it has provided <tt>inputStream</tt>
 * directly; otherwise, it has been wrapped in a
 * <tt>TranscodingDataSource</tt> and the latter has provided
 * <tt>inputStream</tt>.
 *
 * @return the input <tt>DataSource</tt> which caused <tt>inputStream</tt>
 * to exist
 */
public DataSource getInputDataSource()
{
    return inputDataSourceDesc.inputDataSource;
}
/**
* Gets the <tt>SourceStream</tt> described by this instance
*

@ -64,6 +64,12 @@ public class AudioMixingPushBufferStream
*/
private boolean started;
/**
* The time stamp of {@link #inputSamples} to be reported in the specified
* <tt>Buffer</tt> when data is read from this instance.
*/
private long timeStamp;
/**
* The <tt>BufferTransferHandler</tt> through which this
* <tt>PushBufferStream</tt> notifies its clients that new data is
@ -223,6 +229,27 @@ private static int[] mix(
int outputSampleCount)
{
int[] outputSamples = new int[outputSampleCount];
/*
* The trivial case of performing audio mixing the audio of a single
* stream. Then there is nothing to mix and the input becomes the
* output.
*/
if (inputSamples.length == 1)
{
int[] inputStreamSamples = inputSamples[0];
if (inputStreamSamples != null)
System
.arraycopy(
inputStreamSamples,
0,
outputSamples,
0,
inputStreamSamples.length);
return outputSamples;
}
int maxOutputSample;
try
@ -262,10 +289,15 @@ private static int[] mix(
return outputSamples;
}
/*
* Implements PushBufferStream#read(Buffer). If inputSamples are available,
* mixes them and writes them to the specified Buffer performing the
* necessary data type conversions.
/**
* Implements {@link PushBufferStream#read(Buffer)}. If
* <tt>inputSamples</tt> are available, mixes them and writes the mix to the
* specified <tt>Buffer</tt> performing the necessary data type conversions.
*
* @param buffer the <tt>Buffer</tt> to receive the data read from this
* instance
* @throws IOException if anything wrong happens while reading from this
* instance
*/
public void read(Buffer buffer)
throws IOException
@ -273,9 +305,11 @@ public void read(Buffer buffer)
int[][] inputSamples = this.inputSamples;
int inputSampleCount = (inputSamples == null) ? 0 : inputSamples.length;
int maxInputSampleCount = this.maxInputSampleCount;
long timeStamp = this.timeStamp;
this.inputSamples = null;
this.maxInputSampleCount = 0;
this.timeStamp = Buffer.TIME_UNKNOWN;
if ((inputSampleCount == 0)
|| (maxInputSampleCount <= 0))
@ -315,6 +349,7 @@ public void read(Buffer buffer)
buffer.setFormat(outputFormat);
buffer.setLength(outputData.length);
buffer.setOffset(0);
buffer.setTimeStamp(timeStamp);
}
else
throw
@ -328,11 +363,16 @@ public void read(Buffer buffer)
* the clients of this stream.
*
* @param inputSamples the collection of audio sample sets to be mixed by
* this stream when data is read from it
* this stream when data is read from it
* @param maxInputSampleCount the maximum number of per-stream audio samples
* available through <tt>inputSamples</tt>
* available through <tt>inputSamples</tt>
* @param timeStamp the time stamp of <tt>inputSamples</tt> to be reported
* in the specified <tt>Buffer</tt> when data is read from this instance
*/
void setInputSamples(int[][] inputSamples, int maxInputSampleCount)
void setInputSamples(
int[][] inputSamples,
int maxInputSampleCount,
long timeStamp)
{
this.inputSamples = inputSamples;
this.maxInputSampleCount = maxInputSampleCount;

@ -6,8 +6,10 @@
*/
package net.java.sip.communicator.impl.neomedia.device;
import java.io.*;
import java.util.*;
import javax.media.*;
import javax.media.protocol.*;
import javax.media.rtp.*;
@ -107,7 +109,23 @@ public synchronized MediaDeviceSession createSession()
/**
 * Gets the <tt>AudioMixer</tt> which performs the audio mixing in this
 * instance, lazily creating it on first use. The created mixer overrides
 * <tt>readCaptureDeviceStream</tt> so that the data read from the
 * <tt>CaptureDevice</tt> can be observed (e.g. for sound level indication).
 *
 * NOTE(review): the two consecutive assignments to <tt>audioMixer</tt> below
 * appear to be unmerged diff residue — the old single-line assignment
 * immediately followed by its anonymous-subclass replacement. Confirm
 * against the actual revision; as written this is not valid Java.
 *
 * @return the <tt>AudioMixer</tt> of this instance
 */
private AudioMixer getAudioMixer()
{
if (audioMixer == null)
audioMixer = new AudioMixer(device.getCaptureDevice());
audioMixer = new AudioMixer(device.getCaptureDevice())
{
@Override
protected void readCaptureDeviceStream(
PushBufferStream stream,
Buffer buffer)
throws IOException
{
// Delegate the actual read to the base AudioMixer implementation.
super.readCaptureDeviceStream(stream, buffer);
/*
 * TODO Data from the CaptureDevice of the AudioMixer is
 * available here and has not been made available for audio
 * mixing yet. Process it as necessary.
 */
}
};
return audioMixer;
}

@ -243,12 +243,18 @@ protected void addReceiveStream(
if (exception != null)
logger
.error(
"Failed to create player for new receive stream "
+ receiveStream,
"Failed to create player"
+ " for ReceiveStream with ssrc "
+ receiveStream.getSSRC(),
exception);
else if (!waitForState(player, Processor.Configured))
logger
.error(
"Failed to configure player"
+ " for ReceiveStream with ssrc "
+ receiveStream.getSSRC());
else
{
waitForState(player, Processor.Configured);
// // here we add sound level indicator for every incoming
// //stream
// try
@ -276,21 +282,28 @@ protected void addReceiveStream(
// content descriptor to null
player.setContentDescriptor(null);
waitForState(player, Processor.Realized);
if (waitForState(player, Processor.Realized))
{
player.start();
player.start();
realizeComplete(player);
realizeComplete(player);
if (logger.isTraceEnabled())
logger
.trace(
"Created Player with hashCode "
+ player.hashCode()
+ " for ReceiveStream with ssrc "
+ receiveStream.getSSRC());
if (logger.isTraceEnabled())
players.put(receiveStreamDataSource, player);
}
else
logger
.trace(
"Created Player with hashCode "
+ player.hashCode()
.error(
"Failed to realize player"
+ " for ReceiveStream with ssrc "
+ receiveStream.getSSRC());
players.put(receiveStreamDataSource, player);
}
}
}

Loading…
Cancel
Save