Applies further refinements to the neomedia implementation and extensions to its functionality. Attempts to implement audio mixing.

cusax-fix
Lyubomir Marinov 16 years ago
parent 830a31af7e
commit 06c090a2a6

@ -1058,8 +1058,6 @@
prefix="net/java/sip/communicator/impl/media/codec/audio"/>
<zipfileset dir="${dest}/net/java/sip/communicator/impl/media/codec/video"
prefix="net/java/sip/communicator/impl/media/codec/video"/>
<zipfileset dir="${dest}/net/java/sip/communicator/impl/media/conference"
prefix="net/java/sip/communicator/impl/media/conference"/>
<zipfileset dir="${dest}/net/java/sip/communicator/impl/media/protocol"
prefix="net/java/sip/communicator/impl/media/protocol"/>
<zipfileset dir="${dest}/net/java/sip/communicator/impl/media/renderer"
@ -1069,7 +1067,6 @@
<zipfileset src="${lib.win.noinst}/jmf.jar" prefix=""/>
<zipfileset src="${lib.win.noinst}/sound.jar" prefix=""/>
<zipfileset src="${lib.noinst}/fmj.jar" prefix=""/>
<zipfileset src="${lib.noinst}/jain-sdp.jar" prefix=""/>
<zipfileset src="${lib.noinst}/jspeex.jar" prefix=""/>
<zipfileset src="${lib.noinst}/lti-civil-no_s_w_t.jar" prefix=""/>
</jar>
@ -1094,8 +1091,6 @@
prefix="net/java/sip/communicator/impl/media/codec/audio"/>
<zipfileset dir="${dest}/net/java/sip/communicator/impl/media/codec/video"
prefix="net/java/sip/communicator/impl/media/codec/video"/>
<zipfileset dir="${dest}/net/java/sip/communicator/impl/media/conference"
prefix="net/java/sip/communicator/impl/media/conference"/>
<zipfileset dir="${dest}/net/java/sip/communicator/impl/media/protocol"
prefix="net/java/sip/communicator/impl/media/protocol"/>
<zipfileset dir="${dest}/net/java/sip/communicator/impl/media/renderer"
@ -1104,7 +1099,6 @@
prefix="resources/images/impl/media"/>
<zipfileset src="${lib.lin.noinst}/jmf.jar" prefix=""/>
<zipfileset src="${lib.noinst}/fmj.jar" prefix=""/>
<zipfileset src="${lib.noinst}/jain-sdp.jar" prefix=""/>
<zipfileset src="${lib.noinst}/jspeex.jar" prefix=""/>
<zipfileset src="${lib.noinst}/lti-civil-no_s_w_t.jar" prefix=""/>
</jar>
@ -1129,8 +1123,6 @@
prefix="net/java/sip/communicator/impl/media/codec/audio"/>
<zipfileset dir="${dest}/net/java/sip/communicator/impl/media/codec/video"
prefix="net/java/sip/communicator/impl/media/codec/video"/>
<zipfileset dir="${dest}/net/java/sip/communicator/impl/media/conference"
prefix="net/java/sip/communicator/impl/media/conference"/>
<zipfileset dir="${dest}/net/java/sip/communicator/impl/media/protocol"
prefix="net/java/sip/communicator/impl/media/protocol"/>
<zipfileset dir="${dest}/net/java/sip/communicator/impl/media/renderer"
@ -1139,7 +1131,6 @@
prefix="resources/images/impl/media"/>
<zipfileset src="${lib.mac.noinst}/jmf.jar" prefix=""/>
<zipfileset src="${lib.noinst}/fmj.jar" prefix=""/>
<zipfileset src="${lib.noinst}/jain-sdp.jar" prefix=""/>
<zipfileset src="${lib.noinst}/jspeex.jar" prefix=""/>
<zipfileset src="${lib.noinst}/lti-civil-no_s_w_t.jar" prefix=""/>
</jar>

@ -0,0 +1,91 @@
/*
* SIP Communicator, the OpenSource Java VoIP and Instant Messaging client.
*
* Distributable under LGPL license.
* See terms of license at gnu.org.
*/
package net.java.sip.communicator.impl.neomedia;
/**
 * Implements functionality aiding the reading and writing of <tt>byte</tt>
 * arrays and primitive types such as <tt>short</tt>. All multi-byte values
 * are read and written in little-endian byte order (least significant byte
 * first).
 *
 * @author Lubomir Marinov
 */
public class ArrayIOUtils
{

    /**
     * Reads a short integer from a specific series of bytes starting the
     * reading at a specific offset in it. The difference with
     * {@link #readShort(byte[], int)} is that the read short integer is an
     * <tt>int</tt> which has been formed by reading two bytes, not a
     * <tt>short</tt>.
     *
     * @param input the series of bytes to read the short integer from
     * @param inputOffset the offset in <tt>input</tt> at which the reading of
     * the short integer is to start
     * @return a short integer in the form of <tt>int</tt> read from the
     * specified series of bytes starting at the specified offset in it
     */
    public static int readInt16(byte[] input, int inputOffset)
    {
        /*
         * The high byte is allowed to sign-extend (no & 0xFF) so the result
         * carries the sign of the 16-bit value; the low byte is masked to
         * prevent its sign extension from corrupting the high bits.
         */
        return ((input[inputOffset + 1] << 8) | (input[inputOffset] & 0x00FF));
    }

    /**
     * Reads a short integer from a specific series of bytes starting the
     * reading at a specific offset in it.
     *
     * @param input the series of bytes to read the short integer from
     * @param inputOffset the offset in <tt>input</tt> at which the reading of
     * the short integer is to start
     * @return a short integer in the form of <tt>short</tt> read from the
     * specified series of bytes starting at the specified offset in it
     */
    public static short readShort(byte[] input, int inputOffset)
    {
        return
            (short)
                ((input[inputOffset + 1] << 8)
                    | (input[inputOffset] & 0x00FF));
    }

    /**
     * Converts a short integer to a series of bytes and writes the result into
     * a specific output array of bytes starting the writing at a specific
     * offset in it. The difference with {@link #writeShort(short, byte[], int)}
     * is that the input is an <tt>int</tt> and just two bytes of it are
     * written.
     *
     * @param input the short integer to be written out as a series of bytes
     * specified as an integer i.e. the value to be converted is contained in
     * only two of the four bytes made available by the integer
     * @param output the output to receive the conversion of the specified short
     * integer to a series of bytes
     * @param outputOffset the offset in <tt>output</tt> at which the writing of
     * the result of the conversion is to be started
     */
    public static void writeInt16(int input, byte[] output, int outputOffset)
    {
        // Little-endian: least significant byte first.
        output[outputOffset] = (byte) (input & 0xFF);
        output[outputOffset + 1] = (byte) (input >> 8);
    }

    /**
     * Converts a short integer to a series of bytes and writes the result into
     * a specific output array of bytes starting the writing at a specific
     * offset in it.
     *
     * @param input the short integer to be written out as a series of bytes
     * specified as <tt>short</tt>
     * @param output the output to receive the conversion of the specified short
     * integer to a series of bytes
     * @param outputOffset the offset in <tt>output</tt> at which the writing of
     * the result of the conversion is to be started
     */
    public static void writeShort(short input, byte[] output, int outputOffset)
    {
        writeInt16(input, output, outputOffset);
    }
}

@ -119,7 +119,7 @@ public Object getValueAt(int rowIndex, int columnIndex)
case 0:
return (encodingConfiguration.getPriority(encoding) > 0);
case 1:
return MediaUtils.sdpToJmfEncoding(encoding);
return MediaUtils.rtpPayloadTypeToJmfEncoding(encoding);
default:
return null;
}

@ -45,6 +45,14 @@ public class MediaServiceImpl
private final DeviceConfiguration deviceConfiguration
= new DeviceConfiguration();
/**
* The list of audio <tt>MediaDevice</tt>s reported by this instance when
* its {@link MediaService#getDevices(MediaType)} method is called with an
* argument {@link MediaType#AUDIO}.
*/
private final List<CaptureMediaDevice> audioDevices
= new ArrayList<CaptureMediaDevice>();
/**
* The format-related user choices such as the enabled and disabled codecs
* and the order of their preference.
@ -52,6 +60,14 @@ public class MediaServiceImpl
private final EncodingConfiguration encodingConfiguration
= new EncodingConfiguration();
/**
* The list of video <tt>MediaDevice</tt>s reported by this instance when
* its {@link MediaService#getDevices(MediaType)} method is called with an
* argument {@link MediaType#VIDEO}.
*/
private final List<CaptureMediaDevice> videoDevices
= new ArrayList<CaptureMediaDevice>();
/*
* Implements MediaService#createMediaStream(StreamConnector, MediaDevice).
*/
@ -105,61 +121,99 @@ public MediaDevice getDefaultDevice(MediaType mediaType)
* @return the <tt>CaptureDevice</tt> user choices such as the default audio
* and video capture devices.
*/
DeviceConfiguration getDeviceConfiguration()
public DeviceConfiguration getDeviceConfiguration()
{
return deviceConfiguration;
}
/*
* Implements MediaService#getDevices(MediaType).
/**
* Gets a list of the <tt>MediaDevice</tt>s known to this
* <tt>MediaService</tt> and handling the specified <tt>MediaType</tt>.
*
* @param mediaType the <tt>MediaType</tt> to obtain the
* <tt>MediaDevice</tt> list for
* @return a new <tt>List</tt> of <tt>MediaDevice</tt>s known to this
* <tt>MediaService</tt> and handling the specified <tt>MediaType</tt>. The
* returned <tt>List</tt> is a copy of the internal storage and,
* consequently, modifications to it do not affect this instance. Despite
* the fact that a new <tt>List</tt> instance is returned by each call to
* this method, the <tt>MediaDevice</tt> instances are the same if they are
* still known to this <tt>MediaService</tt> to be available.
* @see MediaService#getDevices(MediaType)
*/
public List<MediaDevice> getDevices(MediaType mediaType)
{
CaptureDeviceInfo[] captureDeviceInfos;
List<CaptureMediaDevice> devices;
switch (mediaType)
{
case AUDIO:
captureDeviceInfos
= getDeviceConfiguration().getAvailableAudioCaptureDevices();
devices = audioDevices;
break;
case VIDEO:
captureDeviceInfos
= getDeviceConfiguration().getAvailableVideoCaptureDevices();
devices = videoDevices;
break;
default:
captureDeviceInfos = null;
devices = null;
break;
}
List<MediaDevice> captureDevices;
if ((captureDeviceInfos == null) || (captureDeviceInfos.length == 0))
captureDevices = EMPTY_DEVICES;
else
synchronized (devices)
{
captureDevices
= new ArrayList<MediaDevice>(captureDeviceInfos.length);
if ((captureDeviceInfos == null) || (captureDeviceInfos.length == 0))
{
devices.clear();
return EMPTY_DEVICES;
}
Iterator<CaptureMediaDevice> deviceIter = devices.iterator();
while (deviceIter.hasNext())
{
CaptureDeviceInfo captureDeviceInfo
= deviceIter.next().getCaptureDeviceInfo();
boolean deviceIsFound = false;
for (int i = 0; i < captureDeviceInfos.length; i++)
if (captureDeviceInfo.equals(captureDeviceInfos[i]))
{
deviceIsFound = true;
captureDeviceInfos[i] = null;
break;
}
if (!deviceIsFound)
deviceIter.remove();
}
for (CaptureDeviceInfo captureDeviceInfo : captureDeviceInfos)
{
MediaDevice captureDevice;
if (captureDeviceInfo == null)
continue;
CaptureMediaDevice device;
switch (mediaType)
{
case AUDIO:
captureDevice
= new AudioCaptureMediaDevice(captureDeviceInfo);
device = new AudioCaptureMediaDevice(captureDeviceInfo);
break;
case VIDEO:
default:
captureDevice
device
= new CaptureMediaDevice(captureDeviceInfo, mediaType);
break;
}
captureDevices.add(captureDevice);
devices.add(device);
}
return new ArrayList<MediaDevice>(devices);
}
return captureDevices;
}
/**
@ -169,7 +223,7 @@ public List<MediaDevice> getDevices(MediaType mediaType)
* @return the format-related user choices such as the enabled and disabled
* codecs and the order of their preference
*/
EncodingConfiguration getEncodingConfiguration()
public EncodingConfiguration getEncodingConfiguration()
{
return encodingConfiguration;
}

@ -10,7 +10,6 @@
import java.net.*;
import java.util.*;
import javax.media.*;
import javax.media.control.*;
import javax.media.format.*;
import javax.media.protocol.*;
@ -24,12 +23,13 @@
import net.java.sip.communicator.util.*;
/**
* Implements <tt>MediaStream</tt> using JMF.
*
* @author Lubomir Marinov
*/
public class MediaStreamImpl
extends AbstractMediaStream
implements ControllerListener,
ReceiveStreamListener,
implements ReceiveStreamListener,
SendStreamListener,
SessionListener
{
@ -48,16 +48,17 @@ public class MediaStreamImpl
= "net.java.sip.communicator.impl.media.RECEIVE_BUFFER_LENGTH";
/**
* The <tt>MediaDevice</tt> this instance uses for both capture and playback
* of media.
* The session with the <tt>MediaDevice</tt> this instance uses for both
* capture and playback of media.
*/
private CaptureMediaDevice device;
private MediaDeviceSession deviceSession;
/**
* The list of active <tt>Player</tt>s created during the operation of this
* <tt>MediaStream</tt>.
* The <tt>ReceiveStream</tt>s this instance plays back on its associated
* <tt>MediaDevice</tt>.
*/
protected final List<Player> players = new ArrayList<Player>();
private final List<ReceiveStream> receiveStreams
= new ArrayList<ReceiveStream>();
/**
* The <tt>RTPConnector</tt> through which this instance sends and receives
@ -71,6 +72,15 @@ public class MediaStreamImpl
*/
private RTPManager rtpManager;
/**
* The indicator which determines whether {@link #createSendStreams()} has
* been executed for {@link #rtpManager}. If <tt>true</tt>, the
* <tt>SendStream</tt>s have to be recreated when the <tt>MediaDevice</tt>,
* respectively the <tt>MediaDeviceSession</tt>, of this instance is
* changed.
*/
private boolean sendStreamsAreCreated = false;
/**
* The <tt>MediaDirection</tt> in which this instance is started. For
* example, {@link MediaDirection#SENDRECV} if this instances is both
@ -111,6 +121,7 @@ public MediaStreamImpl(StreamConnector connector, MediaDevice device)
public void close()
{
stop();
closeSendStreams();
rtpConnector.removeTargets();
@ -123,33 +134,17 @@ public void close()
rtpManager = null;
}
disposePlayers();
getDevice().close();
if (deviceSession != null)
deviceSession.close();
}
/**
* Notifies this <tt>ControllerListener</tt> that the <tt>Controller</tt>
* which it is registered with has generated an event.
*
* @param event the <tt>ControllerEvent</tt> specifying the
* <tt>Controller</tt> which is the source of the event and the very type of
* the event
* @see ControllerListener#controllerUpdate(ControllerEvent)
* Closes the <tt>SendStream</tt>s this instance is sending to its remote
* peer.
*/
public void controllerUpdate(ControllerEvent event)
private void closeSendStreams()
{
if (event instanceof RealizeCompleteEvent)
{
Player player = (Player) event.getSourceController();
if (player != null)
{
player.start();
realizeComplete(player);
}
}
stopSendStreams(true);
}
/**
@ -159,7 +154,7 @@ public void controllerUpdate(ControllerEvent event)
private void createSendStreams()
{
RTPManager rtpManager = getRTPManager();
DataSource dataSource = getDevice().getDataSource();
DataSource dataSource = getDeviceSession().getOutputDataSource();
int streamCount;
if (dataSource instanceof PushBufferDataSource)
@ -222,36 +217,30 @@ else if (dataSource instanceof PullDataSource)
exception);
}
}
sendStreamsAreCreated = true;
}
/**
* Releases the resources allocated by a specific <tt>Player</tt> in the
* course of its execution and prepares it to be garbage collected.
* Notifies this <tt>MediaStream</tt> that the <tt>MediaDevice</tt> (and
* respectively the <tt>MediaDeviceSession</tt> with it) which this instance
* uses for capture and playback of media has been changed. Allows extenders
* to override and provide additional processing of <tt>oldValue</tt> and
* <tt>newValue</tt>.
*
* @param player the <tt>Player</tt> to dispose of
*/
protected void disposePlayer(Player player)
{
player.stop();
player.deallocate();
player.close();
players.remove(player);
}
/**
* Releases the resources allocated by {@link #players} in the course of
* their execution and prepares them to be garbage collected.
*/
private void disposePlayers()
* @param oldValue the <tt>MediaDeviceSession</tt> with the
* <tt>MediaDevice</tt> this instance used work with
* @param newValue the <tt>MediaDeviceSession</tt> with the
* <tt>MediaDevice</tt> this instance is to work with
*/
protected void deviceSessionChanged(
MediaDeviceSession oldValue,
MediaDeviceSession newValue)
{
synchronized (players)
if (sendStreamsAreCreated)
{
Player [] players
= this.players.toArray(new Player[this.players.size()]);
for (Player player : players)
disposePlayer(player);
closeSendStreams();
if ((newValue != null) && (rtpManager != null))
createSendStreams();
}
}
@ -263,9 +252,21 @@ private void disposePlayers()
* capture media
* @see MediaStream#getDevice()
*/
public CaptureMediaDevice getDevice()
public AbstractMediaDevice getDevice()
{
return getDeviceSession().getDevice();
}
/**
* Gets the <tt>MediaDeviceSession</tt> which represents the work of this
* <tt>MediaStream</tt> with its associated <tt>MediaDevice</tt>.
*
* @return the <tt>MediaDeviceSession</tt> which represents the work of this
* <tt>MediaStream</tt> with its associated <tt>MediaDevice</tt>
*/
protected MediaDeviceSession getDeviceSession()
{
return device;
return deviceSession;
}
/**
@ -278,7 +279,7 @@ public CaptureMediaDevice getDevice()
*/
public MediaFormat getFormat()
{
return getDevice().getFormat();
return getDeviceSession().getFormat();
}
/**
@ -399,18 +400,6 @@ private RTPManager getRTPManager()
return rtpManager;
}
/**
* Notifies this <tt>MediaStream</tt> that a specific <tt>Player</tt> of
* remote content has generated a <tt>RealizeCompleteEvent</tt>. Allows
* extenders to carry out additional processing on the <tt>Player</tt>.
*
* @param player the <tt>Player</tt> which is the source of a
* <tt>RealizeCompleteEvent</tt>
*/
protected void realizeComplete(Player player)
{
}
/**
* Registers any custom JMF <tt>Format</tt>s with a specific
* <tt>RTPManager</tt>. Extenders should override in order to register their
@ -433,7 +422,31 @@ protected void registerCustomCodecFormats(RTPManager rtpManager)
*/
public void setDevice(MediaDevice device)
{
this.device = (CaptureMediaDevice) device;
AbstractMediaDevice abstractMediaDevice = (AbstractMediaDevice) device;
if ((deviceSession == null) || (deviceSession.getDevice() != device))
{
MediaDeviceSession oldValue = deviceSession;
if (deviceSession != null)
{
deviceSession.close();
deviceSession = null;
}
deviceSession = abstractMediaDevice.createSession();
MediaDeviceSession newValue = deviceSession;
deviceSessionChanged(oldValue, newValue);
if (deviceSession != null)
synchronized (receiveStreams)
{
for (ReceiveStream receiveStream : receiveStreams)
deviceSession.addReceiveStream(receiveStream);
}
}
}
/**
@ -446,7 +459,7 @@ public void setDevice(MediaDevice device)
*/
public void setFormat(MediaFormat format)
{
getDevice().setFormat(format);
getDeviceSession().setFormat(format);
}
/**
@ -536,7 +549,7 @@ public void start(MediaDirection direction)
.warn("Failed to start stream " + sendStream, ioe);
}
getDevice().start(MediaDirection.SENDONLY);
getDeviceSession().start(MediaDirection.SENDONLY);
if (MediaDirection.RECVONLY.equals(startedDirection))
startedDirection = MediaDirection.SENDRECV;
@ -590,7 +603,7 @@ else if (startedDirection == null)
ioe);
}
getDevice().start(MediaDirection.RECVONLY);
getDeviceSession().start(MediaDirection.RECVONLY);
if (MediaDirection.SENDONLY.equals(startedDirection))
startedDirection = MediaDirection.SENDONLY;
@ -634,45 +647,10 @@ public void stop(MediaDirection direction)
&& (MediaDirection.SENDRECV.equals(startedDirection)
|| MediaDirection.SENDONLY.equals(startedDirection)))
{
Iterable<SendStream> sendStreams = rtpManager.getSendStreams();
stopSendStreams(false);
if (sendStreams != null)
for (SendStream sendStream : sendStreams)
try
{
sendStream.getDataSource().stop();
sendStream.stop();
try
{
sendStream.close();
}
catch (NullPointerException npe)
{
/*
* Sometimes com.sun.media.rtp.RTCPTransmitter#bye()
* may throw NullPointerException but it does not
* seem to be guaranteed because it does not happen
* while debugging and stopping at a breakpoint on
* SendStream#close(). One of the cases in which it
* appears upon call hang-up is if we do not close
* the "old" SendStreams upon reinvite(s). Though we
* are now closing such SendStreams, ignore the
* exception here just in case because we already
* ignore IOExceptions.
*/
logger
.error(
"Failed to close stream " + sendStream,
npe);
}
}
catch (IOException ioe)
{
logger.warn("Failed to stop stream " + sendStream, ioe);
}
getDevice().stop(MediaDirection.SENDONLY);
if (deviceSession != null)
deviceSession.stop(MediaDirection.SENDONLY);
if (MediaDirection.SENDRECV.equals(startedDirection))
startedDirection = MediaDirection.RECVONLY;
@ -725,7 +703,8 @@ else if (MediaDirection.SENDONLY.equals(startedDirection))
ioe);
}
getDevice().stop(MediaDirection.RECVONLY);
if (deviceSession != null)
deviceSession.stop(MediaDirection.RECVONLY);
if (MediaDirection.SENDRECV.equals(startedDirection))
startedDirection = MediaDirection.SENDONLY;
@ -734,6 +713,64 @@ else if (MediaDirection.RECVONLY.equals(startedDirection))
}
}
/**
* Stops the <tt>SendStream</tt>s that this instance is sending to its
* remote peer and optionally closes them.
*
* @param close <tt>true</tt> to close the <tt>SendStream</tt>s that this
* instance is sending to its remote peer after stopping them;
* <tt>false</tt> to only stop them
*/
private void stopSendStreams(boolean close)
{
// Nothing to stop if no RTP session has been set up yet.
if (rtpManager == null)
return;
@SuppressWarnings("unchecked")
Iterable<SendStream> sendStreams = rtpManager.getSendStreams();
if (sendStreams == null)
return;
for (SendStream sendStream : sendStreams)
try
{
// Stop the feeding DataSource first, then the stream itself.
sendStream.getDataSource().stop();
sendStream.stop();
if (close)
try
{
sendStream.close();
}
catch (NullPointerException npe)
{
/*
* Sometimes com.sun.media.rtp.RTCPTransmitter#bye() may
* throw NullPointerException but it does not seem to be
* guaranteed because it does not happen while debugging
* and stopping at a breakpoint on SendStream#close().
* One of the cases in which it appears upon call
* hang-up is if we do not close the "old" SendStreams
* upon reinvite(s). Though we are now closing such
* SendStreams, ignore the exception here just in case
* because we already ignore IOExceptions.
*/
logger
.error(
"Failed to close stream " + sendStream,
npe);
}
}
catch (IOException ioe)
{
logger.warn("Failed to stop stream " + sendStream, ioe);
}
// Closed streams must be recreated before the next send.
if (close)
sendStreamsAreCreated = false;
}
/**
* Notifies this <tt>ReceiveStreamListener</tt> that the <tt>RTPManager</tt>
* it is registered with has generated an event related to a <tt>ReceiveStream</tt>.
@ -750,46 +787,33 @@ public void update(ReceiveStreamEvent event)
ReceiveStream receiveStream = event.getReceiveStream();
if (receiveStream != null)
{
DataSource receiveStreamDataSource
= receiveStream.getDataSource();
if (receiveStreamDataSource != null)
synchronized (receiveStreams)
{
Player player = null;
Throwable exception = null;
try
{
player = Manager.createPlayer(receiveStreamDataSource);
}
catch (IOException ioe)
if (!receiveStreams.contains(receiveStream)
&& receiveStreams.add(receiveStream))
{
exception = ioe;
}
catch (NoPlayerException npe)
{
exception = npe;
MediaDeviceSession deviceSession = getDeviceSession();
if (deviceSession != null)
deviceSession.addReceiveStream(receiveStream);
}
}
}
else if (event instanceof TimeoutEvent)
{
ReceiveStream receiveStream = event.getReceiveStream();
if (exception != null)
logger
.error(
"Failed to create player for new receive stream "
+ receiveStream,
exception);
else
if (receiveStream != null)
synchronized (receiveStreams)
{
if (receiveStreams.remove(receiveStream))
{
player.addControllerListener(this);
player.realize();
MediaDeviceSession deviceSession = getDeviceSession();
synchronized (players)
{
players.add(player);
}
if (deviceSession != null)
deviceSession.removeReceiveStream(receiveStream);
}
}
}
}
}

@ -8,10 +8,14 @@
import java.util.*;
import javax.media.*;
import javax.media.format.*;
import javax.sdp.*;
import net.java.sip.communicator.impl.neomedia.codec.*;
import net.java.sip.communicator.impl.neomedia.format.*;
import net.java.sip.communicator.service.neomedia.*;
import net.java.sip.communicator.service.neomedia.format.*;
/**
* Implements static utility methods used by media classes.
@ -22,82 +26,222 @@ public class MediaUtils
{
/**
* Returned by {@link #sdpToJmfEncoding(String)} if it does not know the
* given encoding.
* The constant which stands for an empty array of <tt>MediaFormat</tt>s.
* Explicitly defined in order to reduce unnecessary allocations.
*/
public static final int UNKNOWN_ENCODING = -1;
private static final MediaFormat[] EMPTY_MEDIA_FORMATS = new MediaFormat[0];
/**
* Returns the String encoding, as specified in AudioFormat and VideoFormat,
* corresponding to the format specified in sdpFormatStr
*
* @param sdpEncodingStr the SDP index that we'd like to convert to a JMF
* format.
* @return one of the AudioFormat.XXX or VideoFormat.XXX format strings.
* The <tt>Map</tt> of RTP payload types (expressed as <tt>String</tt>s) to
* <tt>MediaFormat</tt>s.
*/
public static String sdpToJmfEncoding(String sdpEncodingStr)
{
int sdpEncoding = UNKNOWN_ENCODING;
private static final Map<String, MediaFormat[]>
rtpPayloadTypeStr2MediaFormats
= new HashMap<String, MediaFormat[]>();
try
{
sdpEncoding = Integer.parseInt(sdpEncodingStr);
}
catch (NumberFormatException ex)
{
return null;
}
/**
* Returned by {@link #sdpToJmfEncoding(String)} if it does not know the
* given encoding.
*/
public static final int UNKNOWN_ENCODING = -1;
switch (sdpEncoding)
{
case SdpConstants.PCMU:
return AudioFormat.ULAW_RTP;
static
{
mapRtpPayloadTypeToMediaFormats(
SdpConstants.PCMU,
MediaType.AUDIO,
AudioFormat.ULAW_RTP,
8000);
mapRtpPayloadTypeToMediaFormats(
SdpConstants.GSM,
MediaType.AUDIO,
AudioFormat.GSM_RTP,
8000);
mapRtpPayloadTypeToMediaFormats(
SdpConstants.G723,
MediaType.AUDIO,
AudioFormat.G723_RTP,
8000);
mapRtpPayloadTypeToMediaFormats(
SdpConstants.DVI4_8000,
MediaType.AUDIO,
AudioFormat.DVI_RTP,
8000);
mapRtpPayloadTypeToMediaFormats(
SdpConstants.DVI4_16000,
MediaType.AUDIO,
AudioFormat.DVI_RTP,
16000);
mapRtpPayloadTypeToMediaFormats(
SdpConstants.PCMA,
MediaType.AUDIO,
Constants.ALAW_RTP,
8000);
mapRtpPayloadTypeToMediaFormats(
97,
MediaType.AUDIO,
Constants.ILBC_RTP);
mapRtpPayloadTypeToMediaFormats(
98,
MediaType.AUDIO,
Constants.ILBC_RTP);
mapRtpPayloadTypeToMediaFormats(
110,
MediaType.AUDIO,
Constants.SPEEX_RTP);
mapRtpPayloadTypeToMediaFormats(
SdpConstants.G728,
MediaType.AUDIO,
AudioFormat.G728_RTP,
8000);
mapRtpPayloadTypeToMediaFormats(
SdpConstants.G729,
MediaType.AUDIO,
AudioFormat.G729_RTP,
8000);
case SdpConstants.GSM:
return AudioFormat.GSM_RTP;
mapRtpPayloadTypeToMediaFormats(
SdpConstants.H263,
MediaType.VIDEO,
VideoFormat.H263_RTP);
mapRtpPayloadTypeToMediaFormats(
SdpConstants.JPEG,
MediaType.VIDEO,
VideoFormat.JPEG_RTP);
mapRtpPayloadTypeToMediaFormats(
SdpConstants.H261,
MediaType.VIDEO,
VideoFormat.H261_RTP);
mapRtpPayloadTypeToMediaFormats(
Constants.H264_RTP_SDP,
MediaType.VIDEO,
Constants.H264_RTP);
}
case SdpConstants.G723:
return AudioFormat.G723_RTP;
/**
* Adds a new mapping of a specific RTP payload type to a list of
* <tt>MediaFormat</tt>s of a specific <tt>MediaType</tt>, with a specific
* JMF encoding and, optionally, with specific clock rates.
*
* @param rtpPayloadType the RTP payload type to be associated with a list
* of <tt>MediaFormat</tt>s
* @param mediaType the <tt>MediaType</tt> of the <tt>MediaFormat</tt>s to
* be associated with <tt>rtpPayloadType</tt>
* @param jmfEncoding the JMF encoding of the <tt>MediaFormat</tt>s to be
* associated with <tt>rtpPayloadType</tt>
* @param clockRates the optional list of clock rates of the
* <tt>MediaFormat</tt>s to be associated with <tt>rtpPayloadType</tt>
*/
private static void mapRtpPayloadTypeToMediaFormats(
int rtpPayloadType,
MediaType mediaType,
String jmfEncoding,
double... clockRates)
{
int clockRateCount = clockRates.length;
List<MediaFormat> mediaFormats
= new ArrayList<MediaFormat>(clockRateCount);
case SdpConstants.DVI4_8000:
return AudioFormat.DVI_RTP;
if (clockRateCount > 0)
for (double clockRate : clockRates)
{
Format format;
case SdpConstants.DVI4_16000:
return AudioFormat.DVI_RTP;
switch (mediaType)
{
case AUDIO:
format = new AudioFormat(jmfEncoding);
break;
case VIDEO:
format = new VideoFormat(jmfEncoding);
break;
default:
throw new IllegalArgumentException("mediaType");
}
case SdpConstants.PCMA:
return Constants.ALAW_RTP;
if (format != null)
{
MediaFormat mediaFormat
= MediaFormatImpl.createInstance(format, clockRate);
case 97:
return Constants.ILBC_RTP;
if (mediaFormat != null)
mediaFormats.add(mediaFormat);
}
}
else
{
Format format;
case 98:
return Constants.ILBC_RTP;
switch (mediaType)
{
case AUDIO:
format = new AudioFormat(jmfEncoding);
break;
case VIDEO:
format = new VideoFormat(jmfEncoding);
break;
default:
throw new IllegalArgumentException("mediaType");
}
case 110:
return Constants.SPEEX_RTP;
if (format != null)
{
MediaFormat mediaFormat
= MediaFormatImpl.createInstance(format);
case SdpConstants.G728:
return AudioFormat.G728_RTP;
if (mediaFormat != null)
mediaFormats.add(mediaFormat);
}
}
case SdpConstants.G729:
return AudioFormat.G729_RTP;
int mediaFormatCount = mediaFormats.size();
case SdpConstants.H263:
return VideoFormat.H263_RTP;
if (mediaFormatCount > 0)
rtpPayloadTypeStr2MediaFormats
.put(
Integer.toString(rtpPayloadType),
mediaFormats.toArray(new MediaFormat[mediaFormatCount]));
}
case SdpConstants.JPEG:
return VideoFormat.JPEG_RTP;
/**
* Returns the JMF encoding as specified in <tt>AudioFormat</tt> and
* <tt>VideoFormat</tt> corresponding to the specified RTP payload type.
*
* @param rtpPayloadTypeStr the RTP payload type as <tt>String</tt> to get
* the respective JMF encoding of
* @return the JMF encoding corresponding to the specified RTP payload type
*/
public static String rtpPayloadTypeToJmfEncoding(String rtpPayloadTypeStr)
{
MediaFormat[] mediaFormats
= rtpPayloadTypeStr2MediaFormats.get(rtpPayloadTypeStr);
case SdpConstants.H261:
return VideoFormat.H261_RTP;
return
((mediaFormats != null) && (mediaFormats.length > 0))
? ((MediaFormatImpl<? extends Format>) mediaFormats[0])
.getJMFEncoding()
: null;
}
case Constants.H264_RTP_SDP:
return Constants.H264_RTP;
/**
* Gets the <tt>MediaFormat</tt>s (expressed as an array) corresponding to
* a specific RTP payload type (expressed as a <tt>String</tt>).
*
* @param rtpPayloadTypeStr the RTP payload type to retrieve the
* corresponding <tt>MediaFormat</tt>s for
* @return an array of <tt>MediaFormat</tt>s corresponding to the specified
* RTP payload type
*/
public static MediaFormat[] rtpPayloadTypeToMediaFormats(
String rtpPayloadTypeStr)
{
MediaFormat[] mediaFormats
= rtpPayloadTypeStr2MediaFormats.get(rtpPayloadTypeStr);
default:
return null;
}
return
(mediaFormats == null)
? EMPTY_MEDIA_FORMATS
: mediaFormats.clone();
}
/**
@ -113,81 +257,43 @@ public static String sdpToJmfEncoding(String sdpEncodingStr)
public static int jmfToSdpEncoding(String jmfEncoding)
{
if (jmfEncoding == null)
{
return UNKNOWN_ENCODING;
}
else if (jmfEncoding.equals(AudioFormat.ULAW_RTP))
{
return SdpConstants.PCMU;
}
else if (jmfEncoding.equals(Constants.ALAW_RTP))
{
return SdpConstants.PCMA;
}
else if (jmfEncoding.equals(AudioFormat.GSM_RTP))
{
return SdpConstants.GSM;
}
else if (jmfEncoding.equals(AudioFormat.G723_RTP))
{
return SdpConstants.G723;
}
else if (jmfEncoding.equals(AudioFormat.DVI_RTP))
{
return SdpConstants.DVI4_8000;
}
else if (jmfEncoding.equals(AudioFormat.DVI_RTP))
{
return SdpConstants.DVI4_16000;
}
else if (jmfEncoding.equals(AudioFormat.ALAW))
{
return SdpConstants.PCMA;
}
else if (jmfEncoding.equals(AudioFormat.G728_RTP))
{
return SdpConstants.G728;
}
else if (jmfEncoding.equals(AudioFormat.G729_RTP))
{
return SdpConstants.G729;
}
else if (jmfEncoding.equals(VideoFormat.H263_RTP))
{
return SdpConstants.H263;
}
else if (jmfEncoding.equals(VideoFormat.JPEG_RTP))
{
return SdpConstants.JPEG;
}
else if (jmfEncoding.equals(VideoFormat.H261_RTP))
{
return SdpConstants.H261;
}
else if (jmfEncoding.equals(Constants.H264_RTP))
{
return Constants.H264_RTP_SDP;
}
else if (jmfEncoding.equals(Constants.ILBC))
{
return 97;
}
else if (jmfEncoding.equals(Constants.ILBC_RTP))
{
return 97;
}
else if (jmfEncoding.equals(Constants.SPEEX))
{
return 110;
}
else if (jmfEncoding.equals(Constants.SPEEX_RTP))
{
return 110;
}
else
{
return UNKNOWN_ENCODING;
}
}
/**
@ -204,17 +310,13 @@ public static List<String> sdpToJmfEncodings(List<String> sdpEncodings)
List<String> jmfEncodings = new ArrayList<String>();
if (sdpEncodings != null)
{
for (String sdpEncoding : sdpEncodings)
{
String jmfEncoding = sdpToJmfEncoding(sdpEncoding);
String jmfEncoding = rtpPayloadTypeToJmfEncoding(sdpEncoding);
if (jmfEncoding != null)
{
jmfEncodings.add(jmfEncoding);
}
}
}
return jmfEncodings;
}
}

@ -17,6 +17,8 @@
import org.osgi.framework.*;
/**
* Implements <tt>BundleActivator</tt> for the neomedia bundle.
*
* @author Martin Andre
* @author Emil Ivov
* @author Lubomir Marinov
@ -24,38 +26,80 @@
public class NeomediaActivator
implements BundleActivator
{
/**
* The <tt>Logger</tt> used by the <tt>NeomediaActivator</tt> class and its
* instances for logging output.
*/
private final Logger logger = Logger.getLogger(NeomediaActivator.class);
/**
* The context in which the one and only <tt>NeomediaActivator</tt> instance
* has started executing.
*/
private static BundleContext bundleContext;
/**
* The <tt>ConfigurationService</tt> registered in {@link #bundleContext}
* and used by the <tt>NeomediaActivator</tt> instance to read and write
* configuration properties.
*/
private static ConfigurationService configurationService;
/**
* The <tt>FileAccessService</tt> registered in {@link #bundleContext} and
* used by the <tt>NeomediaActivator</tt> instance to safely access files.
*/
private static FileAccessService fileAccessService;
/**
* The one and only <tt>MediaServiceImpl</tt> instance registered in
* {@link #bundleContext} by the <tt>NeomediaActivator</tt> instance.
*/
private static MediaServiceImpl mediaServiceImpl;
/**
* The <tt>NetworkAddressManagerService</tt> registered in
* {@link #bundleContext} and used by the <tt>NeomediaActivator</tt>
* instance for network address resolution.
*/
private static NetworkAddressManagerService networkAddressManagerService;
/**
* The <tt>ResourceManagementService</tt> registered in
* {@link #bundleContext} and representing the resources such as
* internationalized and localized text and images used by the neomedia
* bundle.
*/
private static ResourceManagementService resources;
/**
* The OSGi <tt>ServiceRegistration</tt> of {@link #mediaServiceImpl} in
* {@link #bundleContext}.
*/
private ServiceRegistration mediaServiceRegistration;
/*
* Implements BundleActivator#start(BundleContext).
/**
* Starts the execution of the neomedia bundle in the specified context.
*
* @param bundleContext the context in which the neomedia bundle is to start
* executing
* @throws Exception if an error occurs while starting the execution of the
* neomedia bundle in the specified context
*/
public void start(BundleContext context)
public void start(BundleContext bundleContext)
throws Exception
{
logger.debug("Started.");
NeomediaActivator.bundleContext = context;
NeomediaActivator.bundleContext = bundleContext;
// MediaService
mediaServiceImpl = new MediaServiceImpl();
mediaServiceImpl.start();
mediaServiceRegistration
= context
= bundleContext
.registerService(
MediaService.class.getName(),
mediaServiceImpl,
@ -63,7 +107,7 @@ public void start(BundleContext context)
logger.debug("Media Service ... [REGISTERED]");
// MediaConfigurationForm
context
bundleContext
.registerService(
ConfigurationForm.class.getName(),
new LazyConfigurationForm(
@ -80,10 +124,15 @@ public void start(BundleContext context)
System.setProperty("gov.nist.core.STRIP_ADDR_SCOPES", "true");
}
/*
* Implements BundleActivator#stop(BundleContext).
/**
* Stops the execution of the neomedia bundle in the specified context.
*
* @param bundleContext the context in which the neomedia bundle is to stop
* executing
* @throws Exception if an error occurs while stopping the execution of the
* neomedia bundle in the specified context
*/
public void stop(BundleContext context)
public void stop(BundleContext bundleContext)
throws Exception
{
mediaServiceImpl.stop();
@ -145,6 +194,18 @@ public static FileAccessService getFileAccessService()
return fileAccessService;
}
/**
* Gets the <tt>MediaService</tt> implementation instance registered by the
* neomedia bundle.
*
* @return the <tt>MediaService</tt> implementation instance registered by
* the neomedia bundle
*/
public static MediaServiceImpl getMediaServiceImpl()
{
return mediaServiceImpl;
}
/**
* Returns a reference to a NetworkAddressManagerService implementation
* currently registered in the bundle context or null if no such
@ -169,6 +230,15 @@ public static NetworkAddressManagerService getNetworkAddressManagerService()
return networkAddressManagerService;
}
/**
* Gets the <tt>ResourceManagementService</tt> instance which represents the
* resources such as internationalized and localized text and images used by
* the neomedia bundle.
*
* @return the <tt>ResourceManagementService</tt> instance which represents
* the resources such as internationalized and localized text and images
* used by the neomedia bundle
*/
public static ResourceManagementService getResources()
{
if (resources == null)

@ -4,7 +4,7 @@
* Distributable under LGPL license.
* See terms of license at gnu.org.
*/
package net.java.sip.communicator.impl.neomedia.device;
package net.java.sip.communicator.impl.neomedia;
import javax.media.*;

@ -8,7 +8,6 @@
import java.awt.*;
import java.util.*;
import java.util.List;
import javax.media.*;
import javax.media.control.*;
@ -17,6 +16,7 @@
import javax.media.rtp.*;
import net.java.sip.communicator.impl.neomedia.codec.*;
import net.java.sip.communicator.impl.neomedia.device.*;
import net.java.sip.communicator.service.neomedia.*;
import net.java.sip.communicator.service.neomedia.device.*;
import net.java.sip.communicator.service.neomedia.event.*;
@ -48,7 +48,7 @@ public class VideoMediaStreamImpl
private static boolean formatsRegisteredOnce = false;
/**
* Selects the <tt>VideoFormat</tt> from the list of supported formatts of a
* Selects the <tt>VideoFormat</tt> from the list of supported formats of a
* specific video <tt>DataSource</tt> which has a size as close as possible
* to a specific size and sets it as the format of the specified video
* <tt>DataSource</tt>.
@ -152,11 +152,58 @@ public int compare(FormatInfo info0, FormatInfo info1)
}
/**
* The list of <tt>VideoListener</tt>s interested in changes in the
* availability of visual <tt>Component</tt>s depicting video.
* The <tt>VideoListener</tt> which handles <tt>VideoEvent</tt>s from the
* <tt>MediaDeviceSession</tt> of this instance and fires respective
* <tt>VideoEvent</tt>s from this <tt>VideoMediaStream</tt> to its
* <tt>VideoListener</tt>s.
*/
private final List<VideoListener> videoListeners
= new ArrayList<VideoListener>();
private final VideoListener deviceSessionVideoListener
= new VideoListener()
{
/**
* Notifies that a visual <tt>Component</tt> representing
* video has been added to the provider this listener has
* been added to.
*
* @param e a <tt>VideoEvent</tt> describing the added
* visual <tt>Component</tt> representing video and the
* provider it was added into
* @see VideoListener#videoAdded(VideoEvent)
*/
public void videoAdded(VideoEvent e)
{
fireVideoEvent(
e.getType(),
e.getVisualComponent(),
e.getOrigin());
}
/**
* Notifies that a visual <tt>Component</tt> representing
* video has been removed from the provider this listener
* has been added to.
*
* @param e a <tt>VideoEvent</tt> describing the removed
* visual <tt>Component</tt> representing video and the
* provider it was removed from
* @see VideoListener#videoRemoved(VideoEvent)
*/
public void videoRemoved(VideoEvent e)
{
fireVideoEvent(
e.getType(),
e.getVisualComponent(),
e.getOrigin());
}
};
/**
* The facility which aids this instance in managing a list of
* <tt>VideoListener</tt>s and firing <tt>VideoEvent</tt>s to them.
*/
private final VideoNotifierSupport videoNotifierSupport
= new VideoNotifierSupport(this);
/**
* Initializes a new <tt>VideoMediaStreamImpl</tt> instance which will use
@ -181,7 +228,7 @@ public VideoMediaStreamImpl(StreamConnector connector, MediaDevice device)
* <p>
* Adding a listener which has already been added does nothing i.e. it is
* not added more than once and thus does not receive one and the same
* <tt>VideoEvent</tt> multiple times
* <tt>VideoEvent</tt> multiple times.
* </p>
*
* @param listener the <tt>VideoListener</tt> to be notified when
@ -190,43 +237,36 @@ public VideoMediaStreamImpl(StreamConnector connector, MediaDevice device)
*/
public void addVideoListener(VideoListener listener)
{
if (listener == null)
throw new NullPointerException("listener");
synchronized (videoListeners)
{
if (!videoListeners.contains(listener))
videoListeners.add(listener);
}
videoNotifierSupport.addVideoListener(listener);
}
/**
* Releases the resources allocated by a specific <tt>Player</tt> in the
* course of its execution and prepares it to be garbage collected. If the
* specified <tt>Player</tt> is rendering video, notifies the
* <tt>VideoListener</tt>s of this <tt>VideoMediaStream</tt> that its visual
* <tt>Component</tt> is to no longer be used by firing a
* {@link VideoEvent#VIDEO_REMOVED} <tt>VideoEvent</tt>.
* Notifies this <tt>MediaStream</tt> that the <tt>MediaDevice</tt> (and
* respectively the <tt>MediaDeviceSession</tt> with it) which this instance
* uses for capture and playback of media has been changed. Makes sure that
* the <tt>VideoListener</tt>s of this instance get <tt>VideoEvent</tt>s for
* the new/current <tt>VideoMediaDeviceSession</tt> and not for the old one.
*
* @param player the <tt>Player</tt> to dispose of
* @param oldValue the <tt>MediaDeviceSession</tt> with the
* <tt>MediaDevice</tt> this instance used work with
* @param newValue the <tt>MediaDeviceSession</tt> with the
* <tt>MediaDevice</tt> this instance is to work with
* @see MediaStreamImpl#deviceSessionChanged(MediaDeviceSession,
* MediaDeviceSession)
*/
@Override
protected void disposePlayer(Player player)
protected void deviceSessionChanged(
MediaDeviceSession oldValue,
MediaDeviceSession newValue)
{
/*
* The player is being disposed so let the (interested) listeners know
* its Player#getVisualComponent() (if any) should be released.
*/
Component visualComponent = getVisualComponent(player);
super.disposePlayer(player);
if (visualComponent != null)
fireVideoEvent(
VideoEvent.VIDEO_REMOVED,
visualComponent,
VideoEvent.REMOTE);
super.deviceSessionChanged(oldValue, newValue);
if (oldValue instanceof VideoMediaDeviceSession)
((VideoMediaDeviceSession) oldValue)
.removeVideoListener(deviceSessionVideoListener);
if (newValue instanceof VideoMediaDeviceSession)
((VideoMediaDeviceSession) newValue)
.addVideoListener(deviceSessionVideoListener);
}
/**
@ -235,41 +275,19 @@ protected void disposePlayer(Player player)
* availability of a specific visual <tt>Component</tt> depicting video.
*
* @param type the type of change as defined by <tt>VideoEvent</tt> in the
* availability of the specified visual <tt>Component</tt> depciting video
* availability of the specified visual <tt>Component</tt> depicting video
* @param visualComponent the visual <tt>Component</tt> depicting video
* which has been added or removed in this <tt>VideoMediaStream</tt>
* @param origin
* @param origin {@link VideoEvent#LOCAL} if the origin of the video is
* local (e.g. it is being locally captured); {@link VideoEvent#REMOTE} if
* the origin of the video is remote (e.g. a remote peer is streaming it)
*/
protected void fireVideoEvent(
int type,
Component visualComponent,
int origin)
{
VideoListener[] listeners;
synchronized (videoListeners)
{
listeners
= videoListeners
.toArray(new VideoListener[videoListeners.size()]);
}
if (listeners.length > 0)
{
VideoEvent event
= new VideoEvent(this, type, visualComponent, origin);
for (VideoListener listener : listeners)
switch (type)
{
case VideoEvent.VIDEO_ADDED:
listener.videoAdded(event);
break;
case VideoEvent.VIDEO_REMOVED:
listener.videoRemoved(event);
break;
}
}
videoNotifierSupport.fireVideoEvent(type, visualComponent, origin);
}
/**
@ -283,73 +301,12 @@ protected void fireVideoEvent(
*/
public Component getVisualComponent()
{
synchronized (players)
{
for (Player player : players)
{
Component visualComponent = getVisualComponent(player);
if (visualComponent != null)
return visualComponent;
}
}
return null;
}
/**
* Gets the visual <tt>Component</tt> of a specific <tt>Player</tt> if it
* has one and ignores the failure to access it if the specified
* <tt>Player</tt> is unrealized.
*
* @param player the <tt>Player</tt> to get the visual <tt>Component</tt> of
* if it has one
* @return the visual <tt>Component</tt> of the specified <tt>Player</tt> if
* it has one; <tt>null</tt> if the specified <tt>Player</tt> does not have
* a visual <tt>Component</tt> or the <tt>Player</tt> is unrealized
*/
private Component getVisualComponent(Player player)
{
Component visualComponent;
try
{
visualComponent = player.getVisualComponent();
}
catch (NotRealizedError e)
{
visualComponent = null;
if (logger.isDebugEnabled())
logger
.debug(
"Called Player#getVisualComponent() "
+ "on Unrealized player "
+ player,
e);
}
return visualComponent;
}
MediaDeviceSession deviceSession = getDeviceSession();
/**
* Notifies this <tt>MediaStream</tt> that a specific <tt>Player</tt> of
* remote content has generated a <tt>RealizeCompleteEvent</tt>. Allows
* extenders to carry out additional processing on the <tt>Player</tt>.
*
* @param player the <tt>Player</tt> which is the source of a
* <tt>RealizeCompleteEvent</tt>
*/
@Override
protected void realizeComplete(Player player)
{
super.realizeComplete(player);
Component visualComponent = getVisualComponent(player);
if (visualComponent != null)
fireVideoEvent(
VideoEvent.VIDEO_ADDED,
visualComponent,
VideoEvent.REMOTE);
return
(deviceSession instanceof VideoMediaDeviceSession)
? ((VideoMediaDeviceSession) deviceSession).getVisualComponent()
: null;
}
/**
@ -399,9 +356,6 @@ protected void registerCustomCodecFormats(RTPManager rtpManager)
*/
public void removeVideoListener(VideoListener listener)
{
synchronized (videoListeners)
{
videoListeners.remove(listener);
}
videoNotifierSupport.removeVideoListener(listener);
}
}

@ -4,7 +4,7 @@
* Distributable under LGPL license.
* See terms of license at gnu.org.
*/
package net.java.sip.communicator.impl.media.conference;
package net.java.sip.communicator.impl.neomedia.conference;
import java.io.*;
import java.lang.reflect.*;
@ -14,9 +14,9 @@
import javax.media.protocol.*;
/**
* Represents a <code>PushBufferDataSource</code> which provides a single
* <code>PushBufferStream</code> containing the result of the audio mixing of
* <code>DataSource</code>s.
* Represents a <tt>PushBufferDataSource</tt> which provides a single
* <tt>PushBufferStream</tt> containing the result of the audio mixing of
* <tt>DataSource</tt>s.
*
* @author Lubomir Marinov
*/
@ -26,39 +26,39 @@ public class AudioMixingPushBufferDataSource
{
/**
* The <code>AudioMixer</code> performing the audio mixing, managing the
* input <code>DataSource</code>s and pushing the data of this output
* <code>PushBufferDataSource</code>.
* The <tt>AudioMixer</tt> performing the audio mixing, managing the
* input <tt>DataSource</tt>s and pushing the data of this output
* <tt>PushBufferDataSource</tt>.
*/
private final AudioMixer audioMixer;
/**
* The indicator which determines whether this <code>DataSource</code> is
* The indicator which determines whether this <tt>DataSource</tt> is
* connected.
*/
private boolean connected;
/**
* The one and only <code>PushBufferStream</code> this
* <code>PushBufferDataSource</code> provides to its clients and containing
* the result of the audio mixing performed by <code>audioMixer</code>.
* The one and only <tt>PushBufferStream</tt> this
* <tt>PushBufferDataSource</tt> provides to its clients and containing
* the result of the audio mixing performed by <tt>audioMixer</tt>.
*/
private AudioMixingPushBufferStream outputStream;
/**
* The indicator which determines whether this <code>DataSource</code> is
* The indicator which determines whether this <tt>DataSource</tt> is
* started.
*/
private boolean started;
/**
* Initializes a new <code>AudioMixingPushBufferDataSource</code> instance
* Initializes a new <tt>AudioMixingPushBufferDataSource</tt> instance
* which gives access to the result of the audio mixing performed by a
* specific <code>AudioMixer</code>.
* specific <tt>AudioMixer</tt>.
*
* @param audioMixer the <code>AudioMixer</code> performing audio mixing,
* managing the input <code>DataSource</code>s and pushing the
* data of the new output <code>PushBufferDataSource</code>
* @param audioMixer the <tt>AudioMixer</tt> performing audio mixing,
* managing the input <tt>DataSource</tt>s and pushing the
* data of the new output <tt>PushBufferDataSource</tt>
*/
public AudioMixingPushBufferDataSource(AudioMixer audioMixer)
{
@ -66,15 +66,15 @@ public AudioMixingPushBufferDataSource(AudioMixer audioMixer)
}
/**
* Adds a new input <code>DataSource</code> to be mixed by the associated
* <code>AudioMixer</code> of this instance and to not have its audio
* Adds a new input <tt>DataSource</tt> to be mixed by the associated
* <tt>AudioMixer</tt> of this instance and to not have its audio
* contributions included in the mixing output represented by this
* <code>DataSource</code>.
* <tt>DataSource</tt>.
*
* @param inputDataSource a <code>DataSource</code> to be added for mixing
* to the <code>AudioMixer</code> associate with this instance
* @param inputDataSource a <tt>DataSource</tt> to be added for mixing
* to the <tt>AudioMixer</tt> associate with this instance
* and to not have its audio contributions included in the mixing
* output represented by this <code>DataSource</code>
* output represented by this <tt>DataSource</tt>
*/
public void addInputDataSource(DataSource inputDataSource)
{

@ -4,7 +4,7 @@
* Distributable under LGPL license.
* See terms of license at gnu.org.
*/
package net.java.sip.communicator.impl.media.conference;
package net.java.sip.communicator.impl.neomedia.conference;
import java.io.*;
@ -12,11 +12,11 @@
import javax.media.format.*;
import javax.media.protocol.*;
import net.java.sip.communicator.impl.media.*;
import net.java.sip.communicator.impl.neomedia.*;
/**
* Represents a <code>PushBufferStream</code> containing the result of the audio
* mixing of <code>DataSource</code>s.
* Represents a <tt>PushBufferStream</tt> containing the result of the audio
* mixing of <tt>DataSource</tt>s.
*
* @author Lubomir Marinov
*/
@ -25,50 +25,50 @@ public class AudioMixingPushBufferStream
{
/**
* The <code>AudioMixer.AudioMixerPushBufferStream</code> which reads data
* from the input <code>DataSource</code>s and pushes it to this instance to
* The <tt>AudioMixer.AudioMixerPushBufferStream</tt> which reads data
* from the input <tt>DataSource</tt>s and pushes it to this instance to
* be mixed.
*/
private final AudioMixer.AudioMixerPushBufferStream audioMixerStream;
/**
* The <code>AudioMixingPushBufferDataSource</code> which created and owns
* The <tt>AudioMixingPushBufferDataSource</tt> which created and owns
* this instance and defines the input data which is to not be mixed in the
* output of this <code>PushBufferStream</code>.
* output of this <tt>PushBufferStream</tt>.
*/
private final AudioMixingPushBufferDataSource dataSource;
/**
* The collection of input audio samples still not mixed and read through
* this <code>AudioMixingPushBufferStream</code>.
* this <tt>AudioMixingPushBufferStream</tt>.
*/
private int[][] inputSamples;
/**
* The maximum number of per-stream audio samples available through
* <code>inputSamples</code>.
* <tt>inputSamples</tt>.
*/
private int maxInputSampleCount;
/**
* The <code>BufferTransferHandler</code> through which this
* <code>PushBufferStream</code> notifies its clients that new data is
* The <tt>BufferTransferHandler</tt> through which this
* <tt>PushBufferStream</tt> notifies its clients that new data is
* available for reading.
*/
private BufferTransferHandler transferHandler;
/**
* Initializes a new <code>AudioMixingPushBufferStream</code> mixing the
* Initializes a new <tt>AudioMixingPushBufferStream</tt> mixing the
* input data of a specific
* <code>AudioMixer.AudioMixerPushBufferStream</code> and excluding from the
* <tt>AudioMixer.AudioMixerPushBufferStream</tt> and excluding from the
* mix the audio contributions of a specific
* <code>AudioMixingPushBufferDataSource</code>.
* <tt>AudioMixingPushBufferDataSource</tt>.
*
* @param audioMixerStream the
* <code>AudioMixer.AudioMixerPushBufferStream</code> reading
* data from input <code>DataSource</code>s and to push it to the
* new <code>AudioMixingPushBufferStream</code>
* @param dataSource the <code>AudioMixingPushBufferDataSource</code> which
* <tt>AudioMixer.AudioMixerPushBufferStream</tt> reading
* data from input <tt>DataSource</tt>s and to push it to the
* new <tt>AudioMixingPushBufferStream</tt>
* @param dataSource the <tt>AudioMixingPushBufferDataSource</tt> which
* has requested the initialization of the new instance and which
* defines the input data to not be mixed in the output of the
* new instance
@ -131,13 +131,13 @@ public Object[] getControls()
}
/**
* Gets the <code>AudioMixingPushBufferDataSource</code> which created and
* Gets the <tt>AudioMixingPushBufferDataSource</tt> which created and
* owns this instance and defines the input data which is to not be mixed in
* the output of this <code>PushBufferStream</code>.
* the output of this <tt>PushBufferStream</tt>.
*
* @return the <code>AudioMixingPushBufferDataSource</code> which created
* @return the <tt>AudioMixingPushBufferDataSource</tt> which created
* and owns this instance and defines the input data which is to not
* be mixed in the output of this <code>PushBufferStream</code>
* be mixed in the output of this <tt>PushBufferStream</tt>
*/
public AudioMixingPushBufferDataSource getDataSource()
{
@ -155,12 +155,12 @@ public AudioFormat getFormat()
/**
* Gets the maximum possible value for an audio sample of a specific
* <code>AudioFormat</code>.
* <tt>AudioFormat</tt>.
*
* @param outputFormat the <code>AudioFormat</code> of which to get the
* @param outputFormat the <tt>AudioFormat</tt> of which to get the
* maximum possible value for an audio sample
* @return the maximum possible value for an audio sample of the specified
* <code>AudioFormat</code>
* <tt>AudioFormat</tt>
* @throws UnsupportedFormatException
*/
private static int getMaxOutputSample(AudioFormat outputFormat)
@ -186,11 +186,11 @@ private static int getMaxOutputSample(AudioFormat outputFormat)
/**
* Mixes as in audio mixing a specified collection of audio sample sets and
* returns the resulting mix audio sample set in a specific
* <code>AudioFormat</code>.
* <tt>AudioFormat</tt>.
*
* @param inputSamples the collection of audio sample sets to be mixed into
* one audio sample set in the sense of audio mixing
* @param outputFormat the <code>AudioFormat</code> in which the resulting
* @param outputFormat the <tt>AudioFormat</tt> in which the resulting
* mix audio sample set is to be produced
* @param outputSampleCount the size of the resulting mix audio sample set
* to be produced
@ -310,7 +310,7 @@ public void read(Buffer buffer)
* @param inputSamples the collection of audio sample sets to be mixed by
* this stream when data is read from it
* @param maxInputSampleCount the maximum number of per-stream audio samples
* available through <code>inputSamples</code>
* available through <tt>inputSamples</tt>
*/
void setInputSamples(int[][] inputSamples, int maxInputSampleCount)
{
@ -353,7 +353,7 @@ void stop()
* @param input the integer to be written out as a series of bytes
* @param output the output to receive the conversion of the specified
* integer to a series of bytes
* @param outputOffset the offset in <code>output</code> at which the
* @param outputOffset the offset in <tt>output</tt> at which the
* writing of the result of the conversion is to be started
*/
private static void writeInt(int input, byte[] output, int outputOffset)

@ -4,7 +4,7 @@
* Distributable under LGPL license.
* See terms of license at gnu.org.
*/
package net.java.sip.communicator.impl.media.conference;
package net.java.sip.communicator.impl.neomedia.conference;
import java.io.*;
@ -12,9 +12,9 @@
import javax.media.protocol.*;
/**
* Represents a base class for adapters of <code>SourceStream</code>s, usually
* ones reading data in arrays of bytes and not in <code>Buffer</code>s, to
* <code>SourceStream</code>s reading data in <code>Buffer</code>s. An example
* Represents a base class for adapters of <tt>SourceStream</tt>s, usually
* ones reading data in arrays of bytes and not in <tt>Buffer</tt>s, to
* <tt>SourceStream</tt>s reading data in <tt>Buffer</tt>s. An example
* use is creating a PushBufferStream representation of a PushSourceStream.
*
* @author Lubomir Marinov
@ -24,20 +24,20 @@ public abstract class BufferStreamAdapter<T extends SourceStream>
{
/**
* The <code>Format</code> of this stream to be reported through the output
* <code>Buffer</code> this instance reads data into.
* The <tt>Format</tt> of this stream to be reported through the output
* <tt>Buffer</tt> this instance reads data into.
*/
private final Format format;
/**
* The <code>SourceStream</code> being adapted by this instance.
* The <tt>SourceStream</tt> being adapted by this instance.
*/
protected final T stream;
/**
* Initializes a new <code>BufferStreamAdapter</code> which is to adapt a
* specific <code>SourceStream</code> into a <code>SourceStream</code> with
* a specific <code>Format</code>.
* Initializes a new <tt>BufferStreamAdapter</tt> which is to adapt a
* specific <tt>SourceStream</tt> into a <tt>SourceStream</tt> with
* a specific <tt>Format</tt>.
*
* @param stream
* @param format
@ -93,9 +93,9 @@ public Object[] getControls()
}
/**
* Gets the <code>Format</code> of the data this stream provides.
* Gets the <tt>Format</tt> of the data this stream provides.
*
* @return the <code>Format</code> of the data this stream provides
* @return the <tt>Format</tt> of the data this stream provides
*/
public Format getFormat()
{
@ -103,14 +103,14 @@ public Format getFormat()
}
/**
* Reads byte data from this stream into a specific <code>Buffer</code>
* Reads byte data from this stream into a specific <tt>Buffer</tt>
* which is to use a specific array of bytes for its data.
*
* @param buffer the <code>Buffer</code> to read byte data into from this
* @param buffer the <tt>Buffer</tt> to read byte data into from this
* instance
* @param bytes the array of <code>byte</code>s to read data into from this
* @param bytes the array of <tt>byte</tt>s to read data into from this
* instance and to be set as the data of the specified
* <code>buffer</code>
* <tt>buffer</tt>
* @throws IOException
*/
protected void read(Buffer buffer, byte[] bytes)
@ -134,17 +134,17 @@ protected void read(Buffer buffer, byte[] bytes)
/**
* Reads byte data from this stream into a specific array of
* <code>byte</code>s starting the storing at a specific offset and reading
* <tt>byte</tt>s starting the storing at a specific offset and reading
* at most a specific number of bytes.
*
* @param buffer the array of <code>byte</code>s into which the data read
* @param buffer the array of <tt>byte</tt>s into which the data read
* from this stream is to be written
* @param offset the offset in the specified <code>buffer</code> at which
* @param offset the offset in the specified <tt>buffer</tt> at which
* writing data read from this stream should start
* @param length the maximum number of bytes to be written into the
* specified <code>buffer</code>
* specified <tt>buffer</tt>
* @return the number of bytes read from this stream and written into the
* specified <code>buffer</code>
* specified <tt>buffer</tt>
* @throws IOException
*/
protected abstract int read(byte[] buffer, int offset, int length)

@ -4,7 +4,7 @@
* Distributable under LGPL license.
* See terms of license at gnu.org.
*/
package net.java.sip.communicator.impl.media.conference;
package net.java.sip.communicator.impl.neomedia.conference;
import java.io.*;
@ -12,19 +12,19 @@
import javax.media.format.*;
import javax.media.protocol.*;
import net.java.sip.communicator.impl.media.*;
import net.java.sip.communicator.impl.neomedia.*;
/**
* Enables reading from a <code>PushBufferStream</code> a certain maximum number
* Enables reading from a <tt>PushBufferStream</tt> a certain maximum number
* of data units (e.g. bytes, shorts, ints) even if the
* <code>PushBufferStream</code> itself pushes a larger number of data units.
* <tt>PushBufferStream</tt> itself pushes a larger number of data units.
* <p>
* An example use of this functionality is pacing a
* <code>PushBufferStream</code> which pushes more data units in a single step
* than a <code>CaptureDevice</code>. When these two undergo audio mixing, the
* <tt>PushBufferStream</tt> which pushes more data units in a single step
* than a <tt>CaptureDevice</tt>. When these two undergo audio mixing, the
* different numbers of per-push data units will cause the
* <code>PushBufferStream</code> "play" itself faster than the
* <code>CaptureDevice</code>.
* <tt>PushBufferStream</tt> "play" itself faster than the
* <tt>CaptureDevice</tt>.
* </p>
*
* @author Lubomir Marinov
@ -34,32 +34,32 @@ public class CachingPushBufferStream
{
/**
* The <code>Buffer</code> in which this instance stores the data it reads
* from the wrapped <code>PushBufferStream</code> and from which it reads in
* chunks later on when its <code>#read(Buffer)</code> method is called.
* The <tt>Buffer</tt> in which this instance stores the data it reads
* from the wrapped <tt>PushBufferStream</tt> and from which it reads in
* chunks later on when its <tt>#read(Buffer)</tt> method is called.
*/
private Buffer cache;
/**
* The last <code>IOException</code> this stream has received from the
* <code>#read(Buffer)</code> method of the wrapped stream and to be thrown
* by this stream on the earliest call of its <code>#read(Buffer)</code>
* The last <tt>IOException</tt> this stream has received from the
* <tt>#read(Buffer)</tt> method of the wrapped stream and to be thrown
* by this stream on the earliest call of its <tt>#read(Buffer)</tt>
* method.
*/
private IOException readException;
/**
* The <code>PushBufferStream</code> being paced by this instance with
* The <tt>PushBufferStream</tt> being paced by this instance with
* respect to the maximum number of data units it provides in a single push.
*/
private final PushBufferStream stream;
/**
* Initializes a new <code>CachingPushBufferStream</code> instance which is
* Initializes a new <tt>CachingPushBufferStream</tt> instance which is
* to pace the number of per-push data units a specific
* <code>PushBufferStream</code> provides.
* <tt>PushBufferStream</tt> provides.
*
* @param stream the <code>PushBufferStream</code> to be paced with respect
* @param stream the <tt>PushBufferStream</tt> to be paced with respect
* to the number of per-push data units it provides
*/
public CachingPushBufferStream(PushBufferStream stream)
@ -129,8 +129,8 @@ public Format getFormat()
/**
* Gets the object this instance uses for synchronization of the operations
* (such as reading from the wrapped stream into the cache of this instance
* and reading out of the cache into the <code>Buffer</code> provided to the
* <code>#read(Buffer)</code> method of this instance) it performs in
* and reading out of the cache into the <tt>Buffer</tt> provided to the
* <tt>#read(Buffer)</tt> method of this instance) it performs in
* various threads.
*
* @return the object this instance uses for synchronization of the
@ -187,18 +187,18 @@ public void read(Buffer buffer)
}
/**
* Reads data from a specific input <code>Buffer</code> (if such data is
* Reads data from a specific input <tt>Buffer</tt> (if such data is
* available) and writes the read data into a specific output
* <code>Buffer</code>. The input <code>Buffer</code> will be modified to
* reflect the number of read data units. If the output <code>Buffer</code>
* <tt>Buffer</tt>. The input <tt>Buffer</tt> will be modified to
* reflect the number of read data units. If the output <tt>Buffer</tt>
* has allocated an array for storing the read data and the type of this
* array matches that of the input <code>Buffer</code>, it will be used and
* thus the output <code>Buffer</code> may control the maximum number of
* array matches that of the input <tt>Buffer</tt>, it will be used and
* thus the output <tt>Buffer</tt> may control the maximum number of
* data units to be read into it.
*
* @param input the <code>Buffer</code> to read data from
* @param output the <code>Buffer</code> into which to write the data read
* from the specified <code>input</code>
* @param input the <tt>Buffer</tt> to read data from
* @param output the <tt>Buffer</tt> into which to write the data read
* from the specified <tt>input</tt>
* @throws IOException
* @throws UnsupportedFormatException
*/

@ -4,7 +4,7 @@
* Distributable under LGPL license.
* See terms of license at gnu.org.
*/
package net.java.sip.communicator.impl.media.conference;
package net.java.sip.communicator.impl.neomedia.conference;
import java.io.*;
@ -13,8 +13,8 @@
import javax.media.protocol.*;
/**
* Represents a <code>PullBufferStream</code> which reads its data from a
* specific <code>PullSourceStream</code>.
* Represents a <tt>PullBufferStream</tt> which reads its data from a
* specific <tt>PullSourceStream</tt>.
*
* @author Lubomir Marinov
*/
@ -24,14 +24,14 @@ public class PullBufferStreamAdapter
{
/**
* Initializes a new <code>PullBufferStreamAdapter</code> instance which
* reads its data from a specific <code>PullSourceStream</code> with a
* specific <code>Format</code>
* Initializes a new <tt>PullBufferStreamAdapter</tt> instance which
* reads its data from a specific <tt>PullSourceStream</tt> with a
* specific <tt>Format</tt>
*
* @param stream the <code>PullSourceStream</code> the new instance is to
* @param stream the <tt>PullSourceStream</tt> the new instance is to
* read its data from
* @param format the <code>Format</code> of the specified input
* <code>stream</code> and of the new instance
* @param format the <tt>Format</tt> of the specified input
* <tt>stream</tt> and of the new instance
*/
public PullBufferStreamAdapter(PullSourceStream stream, Format format)
{
@ -40,12 +40,12 @@ public PullBufferStreamAdapter(PullSourceStream stream, Format format)
/**
* Gets the frame size measured in bytes defined by a specific
* <code>Format</code>.
* <tt>Format</tt>.
*
* @param format the <code>Format</code> to determine the frame size in
* @param format the <tt>Format</tt> to determine the frame size in
* bytes of
* @return the frame size measured in bytes defined by the specified
* <code>Format</code>
* <tt>Format</tt>
*/
private static int getFrameSizeInBytes(Format format)
{

@ -4,7 +4,7 @@
* Distributable under LGPL license.
* See terms of license at gnu.org.
*/
package net.java.sip.communicator.impl.media.conference;
package net.java.sip.communicator.impl.neomedia.conference;
import java.io.*;
@ -12,8 +12,8 @@
import javax.media.protocol.*;
/**
* Represents a <code>PushBufferStream</code> which reads its data from a
* specific <code>PushSourceStream</code>.
* Represents a <tt>PushBufferStream</tt> which reads its data from a
* specific <tt>PushSourceStream</tt>.
*
* @author Lubomir Marinov
*/
@ -23,14 +23,14 @@ public class PushBufferStreamAdapter
{
/**
* Initializes a new <code>PushBufferStreamAdapter</code> instance which
* reads its data from a specific <code>PushSourceStream</code> with a
* specific <code>Format</code>
* Initializes a new <tt>PushBufferStreamAdapter</tt> instance which
* reads its data from a specific <tt>PushSourceStream</tt> with a
* specific <tt>Format</tt>
*
* @param stream the <code>PushSourceStream</code> the new instance is to
* @param stream the <tt>PushSourceStream</tt> the new instance is to
* read its data from
* @param format the <code>Format</code> of the specified input
* <code>stream</code> and of the new instance
* @param format the <tt>Format</tt> of the specified input
* <tt>stream</tt> and of the new instance
*/
public PushBufferStreamAdapter(PushSourceStream stream, Format format)
{

@ -4,7 +4,7 @@
* Distributable under LGPL license.
* See terms of license at gnu.org.
*/
package net.java.sip.communicator.impl.media.conference;
package net.java.sip.communicator.impl.neomedia.conference;
import java.io.*;
import java.lang.reflect.*;
@ -14,13 +14,13 @@
import javax.media.format.*;
import javax.media.protocol.*;
import net.java.sip.communicator.impl.media.*;
import net.java.sip.communicator.impl.neomedia.*;
/**
* Represents a <code>DataSource</code> which transcodes the tracks of a
* specific input <code>DataSource</code> into a specific output
* <code>Format</code>. The transcoding is attempted only for tracks which
* actually support it for the specified output <code>Format</code>.
* Represents a <tt>DataSource</tt> which transcodes the tracks of a
* specific input <tt>DataSource</tt> into a specific output
* <tt>Format</tt>. The transcoding is attempted only for tracks which
* actually support it for the specified output <tt>Format</tt>.
*
* @author Lubomir Marinov
*/
@ -29,39 +29,39 @@ public class TranscodingDataSource
{
/**
* The <code>DataSource</code> which has its tracks transcoded by this
* The <tt>DataSource</tt> which has its tracks transcoded by this
* instance.
*/
private final DataSource inputDataSource;
/**
* The <code>DataSource</code> which contains the transcoded tracks of
* <code>inputDataSource</code> and which is wrapped by this instance. It is
* the output of <code>transcodingProcessor</code>.
* The <tt>DataSource</tt> which contains the transcoded tracks of
* <tt>inputDataSource</tt> and which is wrapped by this instance. It is
* the output of <tt>transcodingProcessor</tt>.
*/
private DataSource outputDataSource;
/**
* The <code>Format</code> in which the tracks of
* <code>inputDataSource</code> are transcoded.
* The <tt>Format</tt> in which the tracks of
* <tt>inputDataSource</tt> are transcoded.
*/
private final Format outputFormat;
/**
* The <code>Processor</code> which carries out the actual transcoding of
* the tracks of <code>inputDataSource</code>.
* The <tt>Processor</tt> which carries out the actual transcoding of
* the tracks of <tt>inputDataSource</tt>.
*/
private Processor transcodingProcessor;
/**
* Initializes a new <code>TranscodingDataSource</code> instance to
* transcode the tracks of a specific <code>DataSource</code> into a
* specific output <code>Format</code>.
* Initializes a new <tt>TranscodingDataSource</tt> instance to
* transcode the tracks of a specific <tt>DataSource</tt> into a
* specific output <tt>Format</tt>.
*
* @param inputDataSource the <code>DataSource</code> which is to have its
* tracks transcoded in a specific outptu <code>Format</code>
* @param outputFormat the <code>Format</code> in which the new instance is
* to transcode the tracks of <code>inputDataSource</code>
* @param inputDataSource the <tt>DataSource</tt> which is to have its
* tracks transcoded in a specific output <tt>Format</tt>
* @param outputFormat the <tt>Format</tt> in which the new instance is
* to transcode the tracks of <tt>inputDataSource</tt>
*/
public TranscodingDataSource(
DataSource inputDataSource,
@ -229,10 +229,10 @@ public Time getDuration()
/**
* Gets the output streams that this instance provides. Some of them may be
* the result of transcoding the tracks of the input <code>DataSource</code>
* of this instance in the output <code>Format</code> of this instance.
* the result of transcoding the tracks of the input <tt>DataSource</tt>
* of this instance in the output <tt>Format</tt> of this instance.
*
* @return an array of <code>SourceStream</code>s which represents the
* @return an array of <tt>SourceStream</tt>s which represents the
* collection of output streams that this instance provides
*/
public SourceStream[] getStreams()

@ -78,7 +78,7 @@ protected void setCaptureDevice(CaptureDevice captureDevice)
*/
public void setMute(boolean mute)
{
DataSource captureDevice = getCaptureDevice();
CaptureDevice captureDevice = getCaptureDevice();
if (captureDevice instanceof MutePushBufferDataSource)
((MutePushBufferDataSource) captureDevice).setMute(mute);

@ -6,10 +6,8 @@
*/
package net.java.sip.communicator.impl.neomedia.device;
import java.awt.*;
import java.io.*;
import java.util.*;
import java.util.List;
import javax.media.*;
import javax.media.control.*;
@ -17,6 +15,7 @@
import javax.media.protocol.*;
import net.java.sip.communicator.impl.neomedia.*;
import net.java.sip.communicator.impl.neomedia.codec.*;
import net.java.sip.communicator.impl.neomedia.format.*;
import net.java.sip.communicator.service.neomedia.*;
import net.java.sip.communicator.service.neomedia.device.*;
@ -29,7 +28,7 @@
* @author Lubomir Marinov
*/
public class CaptureMediaDevice
implements MediaDevice
extends AbstractMediaDevice
{
/**
@ -43,7 +42,7 @@ public class CaptureMediaDevice
* The JMF <tt>CaptureDevice</tt> this instance wraps and provides an
* implementation of <tt>MediaDevice</tt> for.
*/
private DataSource captureDevice;
private CaptureDevice captureDevice;
/**
* The <tt>CaptureDeviceInfo</tt> of {@link #captureDevice}.
@ -62,12 +61,6 @@ public class CaptureMediaDevice
*/
private final MediaType mediaType;
/**
* The JMF <tt>Processor</tt> which transcodes {@link #captureDevice} into
* the format of this instance.
*/
private Processor processor;
/**
* Initializes a new <tt>CaptureMediaDevice</tt> instance which is to
* provide an implementation of <tt>MediaDevice</tt> for a specific
@ -115,128 +108,37 @@ public CaptureMediaDevice(
}
/**
* For JPEG and H263, we know that they only work for particular
* sizes. So we'll perform extra checking here to make sure they
* are of the right sizes.
* Notifies this instance that its <tt>captureDevice</tt> (the JMF
* <tt>CaptureDevice</tt> this instance wraps and provides an implementation
* of <tt>MediaDevice</tt> for) property has changed its value from
* <tt>oldValue</tt> to <tt>newValue</tt>. Allows extenders to override in
* order to perform additional processing of the new <tt>captureDevice</tt>
* once it is clear that it is set into this instance.
*
* @param sourceFormat the original format that we'd like to check for
* size.
* @return the modified <tt>VideoFormat</tt> set to the size we support.
*/
private VideoFormat assertSize(VideoFormat sourceFormat)
{
int width, height;
// JPEG
if (sourceFormat.matches(new Format(VideoFormat.JPEG_RTP)))
{
Dimension size = sourceFormat.getSize();
// For JPEG, make sure width and height are divisible by 8.
width = (size.width % 8 == 0)
? size.width
: ( ( (size.width / 8)) * 8);
height = (size.height % 8 == 0)
? size.height
: (size.height / 8) * 8;
}
// H.263
else if (sourceFormat.matches(new Format(VideoFormat.H263_RTP)))
{
// For H.263, we only support some specific sizes.
// if (size.width < 128)
// {
// width = 128;
// height = 96;
// }
// else if (size.width < 176)
// {
// width = 176;
// height = 144;
// }
// else
// {
width = 352;
height = 288;
// }
}
else
{
// We don't know this particular format. We'll just
// leave it alone then.
return sourceFormat;
}
VideoFormat result = new VideoFormat(null,
new Dimension(width, height),
Format.NOT_SPECIFIED,
null,
Format.NOT_SPECIFIED);
return (VideoFormat) result.intersects(sourceFormat);
}
/**
* Releases the resources allocated by this instance in the course of its
* execution and prepares it to be garbage collected.
* @param oldValue the JMF <tt>CaptureDevice</tt> which was the value of the
* <tt>captureDevice</tt> property of this instance before <tt>newValue</tt>
* was set
* @param newValue the JMF <tt>CaptureDevice</tt> which is the value of the
* <tt>captureDevice</tt> property of this instance and which replaced
* <tt>oldValue</tt>
*/
public void close()
protected void captureDeviceChanged(
CaptureDevice oldValue,
CaptureDevice newValue)
{
if (captureDevice != null)
{
/*
* As reported by Carlos Alexandre, stopping before disconnecting
* resolves a slow disconnect on Linux.
*/
try
{
captureDevice.stop();
}
catch (IOException ex)
{
/*
* We cannot do much about the exception because we're not
* really interested in the stopping but rather in calling
* DataSource#disconnect() anyway.
*/
logger.error("Failed to properly stop avDataSource.", ex);
}
captureDevice.disconnect();
}
if (processor != null)
{
processor.stop();
if (processor.getState() == Processor.Realized)
{
DataSource dataOutput = processor.getDataOutput();
if (dataOutput != null)
dataOutput.disconnect();
}
processor.deallocate();
processor.close();
}
}
/**
* Finds the first <tt>Format</tt> instance in a specific list of
* <tt>Format</tt>s which matches a specific <tt>Format</tt>. The
* implementation considers a pair of <tt>Format</tt>s matching if they have
* the same encoding.
* Creates a <tt>DataSource</tt> instance for this <tt>MediaDevice</tt>
* which gives access to the captured media.
*
* @param formats the array of <tt>Format</tt>s to be searched for a match
* to the specified <tt>format</tt>
* @param format the <tt>Format</tt> to search for a match in the specified
* <tt>formats</tt>
* @return the first element of <tt>formats</tt> which matches
* <tt>format</tt> i.e. is of the same encoding
* @return a <tt>DataSource</tt> instance which gives access to the media
* captured by this <tt>MediaDevice</tt>
* @see AbstractMediaDevice#createOutputDataSource()
*/
private Format findFirstMatchingFormat(Format[] formats, Format format)
DataSource createOutputDataSource()
{
for (Format match : formats)
if (match.isSameEncoding(format))
return match;
return null;
return (DataSource) getConnectedCaptureDevice();
}
/**
@ -246,29 +148,56 @@ private Format findFirstMatchingFormat(Format[] formats, Format format)
* @return the JMF <tt>CaptureDevice</tt> this instance wraps and provides
* an implementation of <tt>MediaDevice</tt> for
*/
protected DataSource getCaptureDevice()
public CaptureDevice getCaptureDevice()
{
if (captureDevice == null)
{
CaptureDevice captureDevice = null;
Throwable exception = null;
try
{
setCaptureDevice(
(CaptureDevice)
captureDevice
= (CaptureDevice)
Manager
.createDataSource(captureDeviceInfo.getLocator()));
.createDataSource(captureDeviceInfo.getLocator());
}
catch (IOException ioe)
{
// TODO
exception = ioe;
}
catch (NoDataSourceException ndse)
{
// TODO
exception = ndse;
}
if (exception != null)
logger
.error(
"Failed to create CaptureDevice DataSource "
+ "from CaptureDeviceInfo "
+ captureDeviceInfo,
exception);
else
setCaptureDevice(captureDevice);
}
return captureDevice;
}
/**
 * Returns the <tt>CaptureDeviceInfo</tt> describing the JMF
 * <tt>CaptureDevice</tt> this instance wraps and provides a
 * <tt>MediaDevice</tt> implementation for.
 *
 * @return the <tt>CaptureDeviceInfo</tt> describing the wrapped
 * <tt>CaptureDevice</tt>
 */
public CaptureDeviceInfo getCaptureDeviceInfo()
{
    CaptureDeviceInfo info = captureDeviceInfo;

    return info;
}
/**
* Gets the JMF <tt>CaptureDevice</tt> this instance wraps and provides an
* implementation of <tt>MediaDevice</tt> for in a connected state. If the
@ -281,9 +210,9 @@ protected DataSource getCaptureDevice()
* <tt>null</tt> if this instance has failed to create a
* <tt>CaptureDevice</tt> instance or to connect to it
*/
private DataSource getConnectedCaptureDevice()
private CaptureDevice getConnectedCaptureDevice()
{
DataSource captureDevice = getCaptureDevice();
CaptureDevice captureDevice = getCaptureDevice();
if ((captureDevice != null) && !captureDeviceIsConnected)
{
@ -331,7 +260,7 @@ private DataSource getConnectedCaptureDevice()
{
Control bufferControl
= (Control)
captureDevice
((DataSource) captureDevice)
.getControl(
"javax.media.control.BufferControl");
@ -346,22 +275,6 @@ private DataSource getConnectedCaptureDevice()
return captureDevice;
}
/**
* Gets the output <tt>DataSource</tt> of this instance which provides the
* captured (RTP) data to be sent by <tt>MediaStream</tt> to
* <tt>MediaStreamTarget</tt>.
*
* @return the output <tt>DataSource</tt> of this instance which provides
* the captured (RTP) data to be sent by <tt>MediaStream</tt> to
* <tt>MediaStreamTarget</tt>
*/
public DataSource getDataSource()
{
Processor processor = getProcessor();
return (processor == null) ? null : processor.getDataOutput();
}
/**
* Returns the <tt>MediaDirection</tt> supported by this device.
*
@ -386,23 +299,16 @@ public MediaDirection getDirection()
*/
public MediaFormat getFormat()
{
Processor processor = getProcessor();
MediaType mediaType = getMediaType();
if (processor != null)
for (FormatControl formatControl
: getCaptureDevice().getFormatControls())
{
MediaType mediaType = getMediaType();
for (TrackControl trackControl : processor.getTrackControls())
{
if (!trackControl.isEnabled())
continue;
MediaFormat format
= MediaFormatImpl.createInstance(trackControl.getFormat());
MediaFormat format
= MediaFormatImpl.createInstance(formatControl.getFormat());
if ((format != null) && format.getMediaType().equals(mediaType))
return format;
}
if ((format != null) && format.getMediaType().equals(mediaType))
return format;
}
return null;
}
@ -419,63 +325,6 @@ public MediaType getMediaType()
return mediaType;
}
/**
* Gets the JMF <tt>Processor</tt> which transcodes the
* <tt>CaptureDevice</tt> wrapped by this instance into the format of this
* instance.
*
* @return the JMF <tt>Processor</tt> which transcodes the
* <tt>CaptureDevice</tt> wrapped by this instance into the format of this
* instance
*/
private Processor getProcessor()
{
if (processor == null)
{
DataSource captureDevice = getConnectedCaptureDevice();
if (captureDevice != null)
{
Processor processor = null;
try
{
processor = Manager.createProcessor(captureDevice);
}
catch (IOException ioe)
{
// TODO
}
catch (NoProcessorException npe)
{
// TODO
}
if (waitForState(processor, Processor.Configured))
{
try
{
processor
.setContentDescriptor(
new ContentDescriptor(
ContentDescriptor.RAW_RTP));
}
catch (NotConfiguredError nce)
{
// TODO
processor = null;
}
if (processor != null)
this.processor = processor;
}
else
processor = null;
}
}
return processor;
}
/**
* Gets a list of <tt>MediaFormat</tt>s supported by this
* <tt>MediaDevice</tt>.
@ -485,205 +334,147 @@ private Processor getProcessor()
*/
public List<MediaFormat> getSupportedFormats()
{
Processor processor = getProcessor();
Set<Format> supportedFormats = new HashSet<Format>();
MediaType mediaType = getMediaType();
EncodingConfiguration encodingConfiguration
= NeomediaActivator.getMediaServiceImpl().getEncodingConfiguration();
String[] supportedEncodings;
if (processor != null)
switch (mediaType)
{
MediaType mediaType = getMediaType();
for (TrackControl trackControl : processor.getTrackControls())
{
if (!trackControl.isEnabled())
continue;
for (Format supportedFormat : trackControl.getSupportedFormats())
switch (mediaType)
{
case AUDIO:
if (supportedFormat instanceof AudioFormat)
supportedFormats.add(supportedFormat);
break;
case VIDEO:
if (supportedFormat instanceof VideoFormat)
supportedFormats.add(supportedFormat);
break;
}
}
case AUDIO:
supportedEncodings
= encodingConfiguration.getSupportedAudioEncodings();
break;
case VIDEO:
supportedEncodings
= encodingConfiguration.getSupportedVideoEncodings();
break;
default:
supportedEncodings = null;
break;
}
List<MediaFormat> supportedMediaFormats
= new ArrayList<MediaFormat>(supportedFormats.size());
List<MediaFormat> supportedFormats = new ArrayList<MediaFormat>();
for (Format format : supportedFormats)
supportedMediaFormats.add(MediaFormatImpl.createInstance(format));
return supportedMediaFormats;
}
if (supportedEncodings != null)
for (String supportedPayloadType : supportedEncodings)
{
MediaFormat[] supportedFormatsForPayloadType
= MediaUtils
.rtpPayloadTypeToMediaFormats(supportedPayloadType);
/**
* Sets the JMF <tt>CaptureDevice</tt> this instance wraps and provides a
* <tt>MediaDevice</tt> implementation for. Allows extenders to override in
* order to customize <tt>captureDevice</tt> including to replace it.
*
* @param captureDevice the JMF <tt>CaptureDevice</tt> this instance is to
* wrap and provide a <tt>MediaDevice</tt> implementation for
*/
protected void setCaptureDevice(CaptureDevice captureDevice)
{
if (this.captureDevice != captureDevice)
{
this.captureDevice = (DataSource) captureDevice;
this.captureDeviceInfo = captureDevice.getCaptureDeviceInfo();
}
for (MediaFormat supportedFormatForPayloadType
:supportedFormatsForPayloadType)
supportedFormats.add(supportedFormatForPayloadType);
}
return supportedFormats;
}
/**
* Sets the <tt>MediaFormat</tt> in which this <tt>MediaDevice</tt> is to
* capture data.
* Gets the <tt>MediaFormat</tt>s supported by a specific
* <tt>CaptureDevice</tt>.
*
* @param format the <tt>MediaFormat</tt> in which this <tt>MediaDevice</tt>
* is to capture data
* @param captureDevice the JMF <tt>CaptureDevice</tt> to retrieve the
* supported <tt>MediaFormat</tt>s of
* @return the <tt>MediaFormat</tt>s supported by the specified
* <tt>CaptureDevice</tt>
*/
public void setFormat(MediaFormat format)
private List<MediaFormat> getSupportedFormats(CaptureDevice captureDevice)
{
MediaType mediaType = getMediaType();
Set<Format> supportedFormats = new HashSet<Format>();
if (!mediaType.equals(format.getMediaType()))
throw new IllegalArgumentException("format");
/*
* We need javax.media.Format and we know how to convert MediaFormat to
* it only for MediaFormatImpl so assert early.
*/
MediaFormatImpl<? extends Format> mediaFormatImpl
= (MediaFormatImpl<? extends Format>) format;
Processor processor = getProcessor();
if (processor != null)
for (FormatControl formatControl : captureDevice.getFormatControls())
{
if ((processor.getState() < Processor.Configured)
&& !waitForState(processor, Processor.Configured))
{
// TODO
return;
}
for (TrackControl trackControl : processor.getTrackControls())
{
if (!trackControl.isEnabled())
continue;
Format[] supportedFormats = trackControl.getSupportedFormats();
if ((supportedFormats == null) || (supportedFormats.length < 1))
{
trackControl.setEnabled(false);
continue;
}
Format supportedFormat = null;
for (Format format : formatControl.getSupportedFormats())
switch (mediaType)
{
case AUDIO:
if (supportedFormats[0] instanceof AudioFormat)
{
if (FMJConditionals.FORCE_AUDIO_FORMAT != null)
trackControl
.setFormat(FMJConditionals.FORCE_AUDIO_FORMAT);
else
{
supportedFormat
= findFirstMatchingFormat(
supportedFormats,
mediaFormatImpl.getFormat());
}
}
if (format instanceof AudioFormat)
supportedFormats.add(format);
break;
case VIDEO:
if (supportedFormats[0] instanceof VideoFormat)
{
supportedFormat
= findFirstMatchingFormat(
supportedFormats,
mediaFormatImpl.getFormat());
if (supportedFormat != null)
supportedFormat
= assertSize((VideoFormat) supportedFormat);
}
if (format instanceof VideoFormat)
supportedFormats.add(format);
break;
}
if (supportedFormat == null)
trackControl.setEnabled(false);
else
trackControl.setFormat(supportedFormat);
}
}
List<MediaFormat> supportedMediaFormats
= new ArrayList<MediaFormat>(supportedFormats.size());
for (Format format : supportedFormats)
supportedMediaFormats.add(MediaFormatImpl.createInstance(format));
return supportedMediaFormats;
}
/**
* Starts the processing of media in this instance in a specific direction.
* Gets the <tt>MediaFormat</tt>s supported by a <tt>CaptureDevice</tt>
* judging by its <tt>CaptureDeviceInfo</tt>.
*
* @param direction a <tt>MediaDirection</tt> value which represents the
* direction of the processing of media to be started. For example,
* {@link MediaDirection#SENDRECV} to start both capture and playback of
* media in this instance or {@link MediaDirection#SENDONLY} to only start
* the capture of media in this instance
* @param captureDeviceInfo the <tt>CaptureDeviceInfo</tt> to retrieve the
* supported <tt>MediaFormat</tt>s of
* @return the <tt>MediaFormat</tt>s supported by the specified
* <tt>CaptureDeviceInfo</tt>
*/
public void start(MediaDirection direction)
private List<MediaFormat> getSupportedFormats(
CaptureDeviceInfo captureDeviceInfo)
{
if (direction == null)
throw new IllegalArgumentException("direction");
Format[] supportedFormats = captureDeviceInfo.getFormats();
MediaType mediaType = getMediaType();
List<MediaFormat> supportedMediaFormats
= new ArrayList<MediaFormat>(supportedFormats.length);
if (MediaDirection.SENDRECV.equals(direction)
|| MediaDirection.SENDONLY.equals(direction))
for (Format format : supportedFormats)
{
Processor processor = getProcessor();
MediaFormat mediaFormat = MediaFormatImpl.createInstance(format);
if ((processor != null)
&& (processor.getState() != Processor.Started))
processor.start();
if ((mediaFormat != null)
&& mediaFormat.getMediaType().equals(mediaType))
supportedMediaFormats.add(mediaFormat);
}
return supportedMediaFormats;
}
/**
* Stops the processing of media in this instance in a specific direction.
* Sets the JMF <tt>CaptureDevice</tt> this instance wraps and provides a
* <tt>MediaDevice</tt> implementation for. Allows extenders to override in
* order to customize <tt>captureDevice</tt> including to replace it before
* it is set into this instance.
*
* @param direction a <tt>MediaDirection</tt> value which represents the
* direction of the processing of media to be stopped. For example,
* {@link MediaDirection#SENDRECV} to stop both capture and playback of
* media in this instance or {@link MediaDirection#SENDONLY} to only stop
* the capture of media in this instance
* @param captureDevice the JMF <tt>CaptureDevice</tt> this instance is to
* wrap and provide a <tt>MediaDevice</tt> implementation for
*/
public void stop(MediaDirection direction)
protected void setCaptureDevice(CaptureDevice captureDevice)
{
if (direction == null)
throw new IllegalArgumentException("direction");
if (MediaDirection.SENDRECV.equals(direction)
|| MediaDirection.SENDONLY.equals(direction))
if ((processor != null)
&& (processor.getState() == Processor.Started))
processor.start();
if (this.captureDevice != captureDevice)
{
CaptureDevice oldValue = this.captureDevice;
this.captureDevice = captureDevice;
this.captureDeviceInfo = captureDevice.getCaptureDeviceInfo();
CaptureDevice newValue = captureDevice;
captureDeviceChanged(oldValue, newValue);
}
}
/**
* Waits for the specified JMF <tt>Processor</tt> to enter the specified
* <tt>state</tt> and returns <tt>true</tt> if <tt>processor</tt> has
* successfully entered <tt>state</tt> or <tt>false</tt> if <tt>processor</tt>
* has failed to enter <tt>state</tt>.
* Gets a human-readable <tt>String</tt> representation of this instance.
*
* @param processor the JMF <tt>Processor</tt> to wait on
* @param state the state as defined by the respective <tt>Processor</tt>
* state constants to wait <tt>processor</tt> to enter
* @return <tt>true</tt> if <tt>processor</tt> has successfully entered
* <tt>state</tt>; otherwise, <tt>false</tt>
* @return a <tt>String</tt> providing a human-readable representation of
* this instance
*/
private static boolean waitForState(Processor processor, int state)
@Override
public String toString()
{
return new ProcessorUtility().waitForState(processor, state);
CaptureDeviceInfo captureDeviceInfo = getCaptureDeviceInfo();
return
(captureDeviceInfo == null)
? super.toString()
: captureDeviceInfo.toString();
}
}

@ -51,6 +51,40 @@ public static MediaFormat createInstance(Format format)
return null;
}
/**
 * Creates a new <tt>MediaFormat</tt> instance which represents a specific
 * JMF <tt>Format</tt> and carries a specific clock rate.
 *
 * @param format the JMF <tt>Format</tt> the new instance is to provide an
 * implementation of <tt>MediaFormat</tt> for
 * @param clockRate the clock rate of the new instance
 * @return a new <tt>MediaFormat</tt> instance for the specified JMF
 * <tt>Format</tt> and with the specified clock rate or <tt>null</tt> if
 * <tt>format</tt> is neither an <tt>AudioFormat</tt> nor a
 * <tt>VideoFormat</tt>
 */
public static MediaFormatImpl<? extends Format> createInstance(
    Format format,
    double clockRate)
{
    if (format instanceof AudioFormat)
    {
        AudioFormat audio = (AudioFormat) format;
        /*
         * Build a format which differs from the original only in its
         * clock rate and then intersect the two so that any attributes
         * left unspecified here are carried over from the original.
         */
        AudioFormat withClockRate
            = new AudioFormat(
                    audio.getEncoding(),
                    clockRate,
                    audio.getSampleSizeInBits(),
                    audio.getChannels());

        return
            new AudioMediaFormatImpl(
                    (AudioFormat) withClockRate.intersects(audio));
    }
    else if (format instanceof VideoFormat)
        return new VideoMediaFormatImpl((VideoFormat) format, clockRate);
    else
        return null;
}
/**
* Determines whether a specific set of format parameters is equal to
* another set of format parameters in the sense that they define an equal
@ -184,14 +218,29 @@ && formatParametersAreEqual(
/**
* Implements MediaFormat#getEncoding() and returns the encoding of the JMF
* <tt>Format</tt> that we are encapsulating here.
* <tt>Format</tt> that we are encapsulating here but it is the RFC-known
* encoding and not the internal JMF encoding.
*
* @return the encoding of the JMF
* <tt>Format</tt> that we are encapsulating here.
* @return the RFC-known encoding of the JMF <tt>Format</tt> that we are
* encapsulating
*/
public String getEncoding()
{
return format.getEncoding();
String encoding = getJMFEncoding();
if (encoding != null)
{
int encodingLength = encoding.length();
if (encodingLength > 3)
{
int rtpPos = encodingLength - 4;
if (encoding.substring(rtpPos).equalsIgnoreCase("/rtp"))
encoding = encoding.substring(0, rtpPos);
}
}
return encoding;
}
/**
@ -220,6 +269,18 @@ public Map<String, String> getFormatParameters()
: new HashMap<String, String>(formatParameters);
}
/**
 * Returns the name under which JMF knows the encoding of the wrapped
 * <tt>Format</tt> i.e. the internal JMF encoding name as opposed to the
 * RFC-known name.
 *
 * @return the JMF-internal encoding name of the <tt>Format</tt>
 * represented by this instance
 */
public String getJMFEncoding()
{
    String jmfEncoding = format.getEncoding();

    return jmfEncoding;
}
/**
* Overrides Object#hashCode() because Object#equals(Object) is overridden.
*
@ -240,6 +301,6 @@ public int hashCode()
@Override
public String toString()
{
return getEncoding()+"/"+getClockRate();
return getEncoding() + "/" + ((long) getClockRate());
}
}

@ -38,7 +38,7 @@ public interface MediaService
* @param mediaType the media type (i.e. AUDIO or VIDEO) that we'd like
* to obtain the device list for.
*
* @return the list of <tt>MediaDevices</tt> currently known to handle the
* @return the list of <tt>MediaDevice</tt>s currently known to handle the
* specified <tt>mediaType</tt>.
*/
public List<MediaDevice> getDevices(MediaType mediaType);

@ -10,8 +10,7 @@
/**
* Defines the notification support informing about changes in the availability
* of visual <code>Components</code> representing video such as adding and
* removing.
* of visual <tt>Components</tt> representing video such as adding and removing.
*
* @author Lubomir Marinov
*/
@ -20,22 +19,21 @@ public interface VideoListener
{
/**
* Notifies that a visual <code>Component</code> representing video has been
* Notifies that a visual <tt>Component</tt> representing video has been
* added to the provider this listener has been added to.
*
* @param event a <code>VideoEvent</code> describing the added visual
* <code>Component</code> representing video and the provider it
* was added into
* @param event a <tt>VideoEvent</tt> describing the added visual
* <tt>Component</tt> representing video and the provider it was added into
*/
void videoAdded(VideoEvent event);
/**
* Notifies that a visual <code>Component</code> representing video has been
* Notifies that a visual <tt>Component</tt> representing video has been
* removed from the provider this listener has been added to.
*
* @param event a <code>VideoEvent</code> describing the removed visual
* <code>Component</code> representing video and the provider it
* was removed from
* @param event a <tt>VideoEvent</tt> describing the removed visual
* <tt>Component</tt> representing video and the provider it was removed
* from
*/
void videoRemoved(VideoEvent event);
}

Loading…
Cancel
Save