diff --git a/src/net/java/sip/communicator/impl/neomedia/conference/AudioMixer.java b/src/net/java/sip/communicator/impl/neomedia/conference/AudioMixer.java
index 33db26f2e..435e10057 100644
--- a/src/net/java/sip/communicator/impl/neomedia/conference/AudioMixer.java
+++ b/src/net/java/sip/communicator/impl/neomedia/conference/AudioMixer.java
@@ -269,6 +269,25 @@ void connect()
}
}
+ /**
+ * Creates a new InputStreamDesc instance which is to describe a
+ * specific input SourceStream originating from a specific input
+ * DataSource given by its InputDataSourceDesc.
+ *
+ * @param inputStream the input SourceStream to be described by the
+ * new instance
+ * @param inputDataSourceDesc the InputDataSourceDesc of the input
+ * DataSource from which the specified inputStream originates and
+ * which is to be described by the new instance
+ * @return a new InputStreamDesc instance which describes the
+ * specified input SourceStream and DataSource
+ */
+ private InputStreamDesc createInputStreamDesc(
+ SourceStream inputStream,
+ InputDataSourceDesc inputDataSourceDesc)
+ {
+ return new InputStreamDesc(inputStream, inputDataSourceDesc);
+ }
+
/**
* Creates a new AudioMixingPushBufferDataSource which gives
* access to a single audio stream representing the mix of the audio streams
@@ -612,7 +631,7 @@ && matches(inputFormat, outputFormat))
if (inputStreamDesc == null)
inputStreamDesc
- = new InputStreamDesc(
+ = createInputStreamDesc(
inputStream,
inputDataSourceDesc);
if (inputStreams.add(inputStreamDesc))
@@ -638,7 +657,7 @@ && matches(inputFormat, outputFormat))
if (inputStreamDesc == null)
inputStreamDesc
- = new InputStreamDesc(
+ = createInputStreamDesc(
new PushBufferStreamAdapter(
inputStream,
inputFormat),
@@ -659,7 +678,7 @@ && matches(inputFormat, outputFormat))
if (inputStreamDesc == null)
inputStreamDesc
- = new InputStreamDesc(
+ = createInputStreamDesc(
new PullBufferStreamAdapter(
inputStream,
inputFormat),
@@ -889,6 +908,29 @@ private boolean matches(Format input, AudioFormat pattern)
((input instanceof AudioFormat) && input.isSameEncoding(pattern));
}
+ /**
+ * Reads media from a specific PushBufferStream which belongs to
+ * the CaptureDevice of this AudioMixer into a specific
+ * output Buffer. The reading from the CaptureDevice is
+ * explicitly separated into this method in order to allow extenders to
+ * override and customize it.
+ *
+ * @param stream the PushBufferStream to read media from and known
+ * to belong to the CaptureDevice of this AudioMixer
+ * @param buffer the output Buffer in which the media read from the
+ * specified stream is to be written so that it gets returned to
+ * the caller
+ * @throws IOException if anything wrong happens while reading from the
+ * specified stream
+ */
+ protected void readCaptureDeviceStream(
+ PushBufferStream stream,
+ Buffer buffer)
+ throws IOException
+ {
+ stream.read(buffer);
+ }
+
/**
* Reads an integer from a specific series of bytes starting the reading at
* a specific offset in it.
@@ -1304,6 +1346,15 @@ public void read(Buffer buffer)
buffer.setData(inputSampleDesc);
buffer.setLength(maxInputSampleCount);
+
+ /*
+ * Convey the timeStamp so that it can be reported by the Buffers of
+ * the AudioMixingPushBufferStreams when mixes are read from them.
+ */
+ long timeStamp = inputSampleDesc.getTimeStamp();
+
+ if (timeStamp != Buffer.TIME_UNKNOWN)
+ buffer.setTimeStamp(timeStamp);
}
/**
@@ -1320,17 +1371,21 @@ public void read(Buffer buffer)
* @param sampleCount the maximum number of samples which the read
* operation should attempt to read from inputStream but the
* very inputStream may not honor the request
- * @return an array of audio samples read from the specified
- * inputStream
+ * @param captureDevice true if the specified
+ * inputStream is one of the streams of the
+ * CaptureDevice of this AudioMixer
+ * @return a Buffer which contains the array of int
+ * audio samples read from the specified inputStream
* @throws IOException if anything wrong happens while reading
* inputStream
* @throws UnsupportedFormatException if converting the samples read
* from inputStream to outputFormat fails
*/
- private int[] read(
+ private Buffer read(
PushBufferStream inputStream,
AudioFormat outputFormat,
- int sampleCount)
+ int sampleCount,
+ boolean captureDevice)
throws IOException,
UnsupportedFormatException
{
@@ -1358,9 +1413,19 @@ private int[] read(
"!Format.getDataType().equals(byte[].class)",
inputStreamFormat);
}
-
- inputStream.read(buffer);
-
+
+ if (captureDevice)
+ readCaptureDeviceStream(inputStream, buffer);
+ else
+ inputStream.read(buffer);
+
+ /*
+ * If the media is to be discarded, don't even bother with the
+ * checks and the conversion.
+ */
+ if (buffer.isDiscard())
+ return null;
+
int inputLength = buffer.getLength();
if (inputLength <= 0)
@@ -1372,8 +1437,8 @@ private int[] read(
inputFormat = inputStreamFormat;
if (logger.isTraceEnabled()
- && ((lastReadInputFormat == null)
- || !lastReadInputFormat.matches(inputFormat)))
+ && (lastReadInputFormat != null)
+ && !lastReadInputFormat.matches(inputFormat))
{
lastReadInputFormat = inputFormat;
logger
@@ -1466,7 +1531,11 @@ private int[] read(
outputSamples[i] = sample;
}
- return outputSamples;
+ buffer.setData(outputSamples);
+ buffer.setFormat(outputFormat);
+ buffer.setLength(outputSamples.length);
+ buffer.setOffset(0);
+ return buffer;
case 32:
outputSamples = new int[inputSamples.length / 4];
for (int i = 0; i < outputSamples.length; i++)
@@ -1491,7 +1560,11 @@ private int[] read(
outputSamples[i] = sample;
}
- return outputSamples;
+ buffer.setData(outputSamples);
+ buffer.setFormat(outputFormat);
+ buffer.setLength(outputSamples.length);
+ buffer.setOffset(0);
+ return buffer;
case 8:
case 24:
default:
@@ -1588,11 +1661,17 @@ private int readInputPushBufferStreams(
if (inputStream instanceof PushBufferStream)
{
- int[] inputStreamSamples
+ Buffer inputStreamBuffer
= read(
(PushBufferStream) inputStream,
outputFormat,
- maxInputSampleCount);
+ maxInputSampleCount,
+ inputStreamDesc.getInputDataSource()
+ == captureDevice);
+ int[] inputStreamSamples
+ = (inputStreamBuffer == null)
+ ? null
+ : (int[]) inputStreamBuffer.getData();
int inputStreamSampleCount;
if (inputStreamSamples != null)
@@ -1604,6 +1683,19 @@ private int readInputPushBufferStreams(
if (maxInputSampleCount < inputStreamSampleCount)
maxInputSampleCount = inputStreamSampleCount;
+
+ /*
+ * Convey the timeStamp so that it can be set on the
+ * Buffers of the AudioMixingPushBufferStreams when
+ * mixes are read from them. Since the inputStreams
+ * will report different timeStamps, only use the
+ * first meaningful timeStamp for now.
+ */
+ if (inputSampleDesc.getTimeStamp()
+ == Buffer.TIME_UNKNOWN)
+ inputSampleDesc
+ .setTimeStamp(
+ inputStreamBuffer.getTimeStamp());
}
else if (logger.isTraceEnabled())
inputStreamDesc.nonContributingReadCount++;
@@ -1692,7 +1784,11 @@ private void setInputSamples(
inputSamples[i] = null;
}
- outputStream.setInputSamples(inputSamples, maxInputSampleCount);
+ outputStream
+ .setInputSamples(
+ inputSamples,
+ maxInputSampleCount,
+ inputSampleDesc.getTimeStamp());
}
/**
@@ -2022,6 +2118,13 @@ private static class InputSampleDesc
*/
public final InputStreamDesc[] inputStreams;
+ /**
+ * The time stamp of inputSamples to be reported in the
+ * Buffers of the AudioMixingPushBufferStreams when
+ * mixes are read from them.
+ */
+ private long timeStamp = Buffer.TIME_UNKNOWN;
+
/**
* Initializes a new InputSampleDesc instance which is to
* describe a specific set of audio samples read from a specific set of
@@ -2039,6 +2142,45 @@ public InputSampleDesc(
this.inputSamples = inputSamples;
this.inputStreams = inputStreams;
}
+
+ /**
+ * Gets the time stamp of inputSamples to be reported in the
+ * Buffers of the AudioMixingPushBufferStreams when
+ * mixes are read from them.
+ *
+ * @return the time stamp of inputSamples to be reported in the
+ * Buffers of the AudioMixingPushBufferStreams when
+ * mixes are read from them
+ */
+ public long getTimeStamp()
+ {
+ return timeStamp;
+ }
+
+ /**
+ * Sets the time stamp of inputSamples to be reported in the
+ * Buffers of the AudioMixingPushBufferStreams when
+ * mixes are read from them.
+ *
+ * @param timeStamp the time stamp of inputSamples to be
+ * reported in the Buffers of the
+ * AudioMixingPushBufferStreams when mixes are read from them
+ */
+ public void setTimeStamp(long timeStamp)
+ {
+ if (this.timeStamp == Buffer.TIME_UNKNOWN)
+ this.timeStamp = timeStamp;
+ else
+ {
+ /*
+ * Setting the timeStamp more than once does not make sense
+ * because the inputStreams will report different timeStamps, so
+ * only one of them should be picked up at the point where the
+ * actual reading from the inputStreams takes place.
+ */
+ throw new IllegalStateException("timeStamp");
+ }
+ }
}
/**
@@ -2092,6 +2234,22 @@ public InputStreamDesc(
this.inputDataSourceDesc = inputDataSourceDesc;
}
+ /**
+ * Gets the input DataSource which caused {@link #inputStream}
+ * to exist. If the input DataSource is not transcoded for the
+ * purposes of the audio mixing, it has directly provided
+ * inputStream. Otherwise, it has been wrapped in a
+ * TranscodingDataSource and the latter has provided
+ * inputStream.
+ *
+ * @return the input DataSource which caused
+ * inputStream to exist
+ */
+ public DataSource getInputDataSource()
+ {
+ return inputDataSourceDesc.inputDataSource;
+ }
+
/**
* Gets the SourceStream described by this instance
*
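
The AudioMixer changes above thread a single time stamp from the first contributing input stream through InputSampleDesc and on to the mixed Buffers, and the guard in InputSampleDesc#setTimeStamp(long) enforces a first-writer-wins policy. Below is a minimal, self-contained sketch of that policy only, assuming nothing beyond javax.media.Buffer.TIME_UNKNOWN as the "unset" marker; the MixTimeStamp name is illustrative and does not appear in the patch.

    import javax.media.Buffer;

    /**
     * Illustrative holder which retains only the first meaningful time stamp,
     * mirroring the policy of InputSampleDesc in the patch above.
     */
    final class MixTimeStamp
    {
        private long value = Buffer.TIME_UNKNOWN;

        long get()
        {
            return value;
        }

        void set(long timeStamp)
        {
            /*
             * Later values would come from other input streams and would
             * disagree, so accepting more than one is treated as an error.
             */
            if (value == Buffer.TIME_UNKNOWN)
                value = timeStamp;
            else
                throw new IllegalStateException("timeStamp");
        }
    }

readInputPushBufferStreams applies the same rule by checking for Buffer.TIME_UNKNOWN before calling setTimeStamp, so the IllegalStateException signals a programming error rather than an expected runtime condition.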
diff --git a/src/net/java/sip/communicator/impl/neomedia/conference/AudioMixingPushBufferStream.java b/src/net/java/sip/communicator/impl/neomedia/conference/AudioMixingPushBufferStream.java
index ac6008c57..29451aafd 100644
--- a/src/net/java/sip/communicator/impl/neomedia/conference/AudioMixingPushBufferStream.java
+++ b/src/net/java/sip/communicator/impl/neomedia/conference/AudioMixingPushBufferStream.java
@@ -64,6 +64,12 @@ public class AudioMixingPushBufferStream
*/
private boolean started;
+ /**
+ * The time stamp of {@link #inputSamples} to be reported in the
+ * Buffer specified in calls to {@link #read(Buffer)}.
+ */
+ private long timeStamp;
+
/**
* The BufferTransferHandler through which this
* PushBufferStream notifies its clients that new data is
@@ -223,6 +229,27 @@ private static int[] mix(
int outputSampleCount)
{
int[] outputSamples = new int[outputSampleCount];
+
+ /*
+ * The trivial case of mixing the audio of a single stream: there is
+ * nothing to mix and the input becomes the output.
+ */
+ if (inputSamples.length == 1)
+ {
+ int[] inputStreamSamples = inputSamples[0];
+
+ if (inputStreamSamples != null)
+ System
+ .arraycopy(
+ inputStreamSamples,
+ 0,
+ outputSamples,
+ 0,
+ inputStreamSamples.length);
+ return outputSamples;
+ }
+
int maxOutputSample;
try
@@ -262,10 +289,15 @@ private static int[] mix(
return outputSamples;
}
- /*
- * Implements PushBufferStream#read(Buffer). If inputSamples are available,
- * mixes them and writes them to the specified Buffer performing the
- * necessary data type conversions.
+ /**
+ * Implements {@link PushBufferStream#read(Buffer)}. If
+ * inputSamples are available, mixes them and writes the mix to the
+ * specified Buffer performing the necessary data type conversions.
+ *
+ * @param buffer the Buffer to receive the data read from this
+ * instance
+ * @throws IOException if anything wrong happens while reading from this
+ * instance
*/
public void read(Buffer buffer)
throws IOException
@@ -273,9 +305,11 @@ public void read(Buffer buffer)
int[][] inputSamples = this.inputSamples;
int inputSampleCount = (inputSamples == null) ? 0 : inputSamples.length;
int maxInputSampleCount = this.maxInputSampleCount;
+ long timeStamp = this.timeStamp;
this.inputSamples = null;
this.maxInputSampleCount = 0;
+ this.timeStamp = Buffer.TIME_UNKNOWN;
if ((inputSampleCount == 0)
|| (maxInputSampleCount <= 0))
@@ -315,6 +349,7 @@ public void read(Buffer buffer)
buffer.setFormat(outputFormat);
buffer.setLength(outputData.length);
buffer.setOffset(0);
+ buffer.setTimeStamp(timeStamp);
}
else
throw
@@ -328,11 +363,16 @@ public void read(Buffer buffer)
* the clients of this stream.
*
* @param inputSamples the collection of audio sample sets to be mixed by
- * this stream when data is read from it
+ * this stream when data is read from it
* @param maxInputSampleCount the maximum number of per-stream audio samples
- * available through inputSamples
+ * available through inputSamples
+ * @param timeStamp the time stamp of inputSamples to be reported
+ * in the specified Buffer when data is read from this instance
*/
- void setInputSamples(int[][] inputSamples, int maxInputSampleCount)
+ void setInputSamples(
+ int[][] inputSamples,
+ int maxInputSampleCount,
+ long timeStamp)
{
this.inputSamples = inputSamples;
this.maxInputSampleCount = maxInputSampleCount;
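
The early return added to mix(...) above handles the single-stream conference without any summing. The sketch below shows the overall shape of such a routine for context; it is not the actual mix(...) implementation, which additionally clips against the maximum sample value derived from the output AudioFormat, and the mixSketch name is hypothetical.

    /**
     * Sketch of int-sample mixing with the single-stream fast path from the
     * patch: a lone input set has nothing to be summed with, so it is copied
     * straight into the output. The general case here merely sums and omits
     * the overflow handling of the real implementation.
     */
    static int[] mixSketch(int[][] inputSamples, int outputSampleCount)
    {
        int[] outputSamples = new int[outputSampleCount];

        if (inputSamples.length == 1)
        {
            int[] streamSamples = inputSamples[0];

            if (streamSamples != null)
                System.arraycopy(
                    streamSamples, 0,
                    outputSamples, 0,
                    streamSamples.length);
            return outputSamples;
        }

        for (int[] streamSamples : inputSamples)
        {
            if (streamSamples == null)
                continue;
            for (int i = 0; i < streamSamples.length; i++)
                outputSamples[i] += streamSamples[i];
        }
        return outputSamples;
    }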
diff --git a/src/net/java/sip/communicator/impl/neomedia/device/AudioMixerMediaDevice.java b/src/net/java/sip/communicator/impl/neomedia/device/AudioMixerMediaDevice.java
index e39402aa9..5f609e7ca 100644
--- a/src/net/java/sip/communicator/impl/neomedia/device/AudioMixerMediaDevice.java
+++ b/src/net/java/sip/communicator/impl/neomedia/device/AudioMixerMediaDevice.java
@@ -6,8 +6,10 @@
*/
package net.java.sip.communicator.impl.neomedia.device;
+import java.io.*;
import java.util.*;
+import javax.media.*;
import javax.media.protocol.*;
import javax.media.rtp.*;
@@ -107,7 +109,23 @@ public synchronized MediaDeviceSession createSession()
private AudioMixer getAudioMixer()
{
if (audioMixer == null)
- audioMixer = new AudioMixer(device.getCaptureDevice());
+ audioMixer = new AudioMixer(device.getCaptureDevice())
+ {
+ @Override
+ protected void readCaptureDeviceStream(
+ PushBufferStream stream,
+ Buffer buffer)
+ throws IOException
+ {
+ super.readCaptureDeviceStream(stream, buffer);
+
+ /*
+ * TODO Data from the CaptureDevice of the AudioMixer is
+ * available here and has not been made available for audio
+ * mixing yet. Process it as necessary.
+ */
+ }
+ };
return audioMixer;
}
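
The TODO in the override above marks the point where locally captured media becomes visible before it enters the mix. Purely as an illustration of how the hook could be used, the sketch below computes a rough audio level from the buffer; it assumes 16-bit signed little-endian PCM (real code would consult stream.getFormat() for the actual AudioFormat) and leaves the delivery of the level to a listener abstract.

    @Override
    protected void readCaptureDeviceStream(
            PushBufferStream stream,
            Buffer buffer)
        throws IOException
    {
        super.readCaptureDeviceStream(stream, buffer);

        if (buffer.isDiscard() || (buffer.getLength() <= 0))
            return;

        /*
         * Hypothetical processing of the captured media: the average absolute
         * amplitude of the 16-bit little-endian samples in the buffer.
         */
        byte[] data = (byte[]) buffer.getData();
        int offset = buffer.getOffset();
        int end = offset + buffer.getLength();
        long sum = 0;
        int sampleCount = 0;

        for (int i = offset; i + 1 < end; i += 2)
        {
            int sample = (short) ((data[i] & 0xFF) | (data[i + 1] << 8));

            sum += Math.abs(sample);
            sampleCount++;
        }

        int averageLevel = (sampleCount == 0) ? 0 : (int) (sum / sampleCount);
        // Deliver averageLevel to a (hypothetical) local sound level listener.
    }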
diff --git a/src/net/java/sip/communicator/impl/neomedia/device/MediaDeviceSession.java b/src/net/java/sip/communicator/impl/neomedia/device/MediaDeviceSession.java
index 7895d6494..f663c83a3 100644
--- a/src/net/java/sip/communicator/impl/neomedia/device/MediaDeviceSession.java
+++ b/src/net/java/sip/communicator/impl/neomedia/device/MediaDeviceSession.java
@@ -243,12 +243,18 @@ protected void addReceiveStream(
if (exception != null)
logger
.error(
- "Failed to create player for new receive stream "
- + receiveStream,
+ "Failed to create player"
+ + " for ReceiveStream with ssrc "
+ + receiveStream.getSSRC(),
exception);
+ else if (!waitForState(player, Processor.Configured))
+ logger
+ .error(
+ "Failed to configure player"
+ + " for ReceiveStream with ssrc "
+ + receiveStream.getSSRC());
else
{
- waitForState(player, Processor.Configured);
// // here we add sound level indicator for every incoming
// //stream
// try
@@ -276,21 +282,28 @@ protected void addReceiveStream(
// content descriptor to null
player.setContentDescriptor(null);
- waitForState(player, Processor.Realized);
+ if (waitForState(player, Processor.Realized))
+ {
+ player.start();
- player.start();
+ realizeComplete(player);
- realizeComplete(player);
+ if (logger.isTraceEnabled())
+ logger
+ .trace(
+ "Created Player with hashCode "
+ + player.hashCode()
+ + " for ReceiveStream with ssrc "
+ + receiveStream.getSSRC());
- if (logger.isTraceEnabled())
+ players.put(receiveStreamDataSource, player);
+ }
+ else
logger
- .trace(
- "Created Player with hashCode "
- + player.hashCode()
+ .error(
+ "Failed to realize player"
+ " for ReceiveStream with ssrc "
+ receiveStream.getSSRC());
-
- players.put(receiveStreamDataSource, player);
}
}
}
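
The hunk above now branches on the boolean result of waitForState instead of assuming the state transition succeeded. Below is a minimal sketch of such a boolean-returning helper built only on the standard javax.media API; it is not the helper MediaDeviceSession actually uses, and the ProcessorStateWaiter name is hypothetical.

    import javax.media.*;

    /** Illustrative helper which waits for a Processor state transition. */
    final class ProcessorStateWaiter
    {
        static boolean waitForState(Processor processor, int state)
        {
            final Object signal = new Object();
            final boolean[] failed = new boolean[1];
            ControllerListener listener = new ControllerListener()
            {
                public void controllerUpdate(ControllerEvent event)
                {
                    // Wake the waiter when a transition completes or the
                    // Controller is closed (which includes error events).
                    if (event instanceof ControllerClosedEvent)
                        failed[0] = true;
                    if ((event instanceof TransitionEvent)
                            || (event instanceof ControllerClosedEvent))
                    {
                        synchronized (signal)
                        {
                            signal.notifyAll();
                        }
                    }
                }
            };

            processor.addControllerListener(listener);
            try
            {
                // Request the transition corresponding to the target state.
                if (processor.getState() < state)
                {
                    if (state == Processor.Configured)
                        processor.configure();
                    else if (state == Processor.Realized)
                        processor.realize();
                }

                synchronized (signal)
                {
                    while (!failed[0] && (processor.getState() < state))
                    {
                        try
                        {
                            signal.wait(50);
                        }
                        catch (InterruptedException ie)
                        {
                            Thread.currentThread().interrupt();
                            break;
                        }
                    }
                }
            }
            finally
            {
                processor.removeControllerListener(listener);
            }
            return !failed[0] && (processor.getState() >= state);
        }
    }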