In neomedia, attempts to address various issues reported by Emil Ivov, including exceptions when setting formats on MediaStream and 'mute' support for MediaStream in general.

cusax-fix
Lyubomir Marinov 16 years ago
parent 6323a44d57
commit bf6de7c3fc
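
As a rough usage sketch of the mute support this commit introduces (not part of the diff itself): AudioMediaStream already declares isMute()/setMute(boolean), and the changes below move the implementation from AudioMediaStreamImpl/AudioMediaDeviceImpl into MediaStreamImpl/MediaDeviceImpl, which toggle a MutePushBufferDataSource wrapped around the capture DataSource.

import net.java.sip.communicator.service.neomedia.*;

// Hedged sketch, not taken from the commit: toggle mute on an already
// configured stream. The call delegates to MediaDeviceImpl.setMute(boolean),
// which flips the MutePushBufferDataSource wrapper around the capture device.
static void toggleMute(AudioMediaStream stream)
{
    stream.setMute(!stream.isMute());
}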

@ -6,11 +6,11 @@
*/
package net.java.sip.communicator.impl.neomedia;
import javax.media.*;
import javax.media.format.*;
import javax.media.rtp.*;
import net.java.sip.communicator.impl.neomedia.codec.*;
import net.java.sip.communicator.impl.neomedia.device.*;
import net.java.sip.communicator.service.neomedia.*;
import net.java.sip.communicator.service.neomedia.device.*;
import net.java.sip.communicator.service.neomedia.event.*;
@ -52,7 +52,7 @@ public class AudioMediaStreamImpl
8000,
8,
1,
-1,
Format.NOT_SPECIFIED,
AudioFormat.SIGNED)
};
@ -109,20 +109,6 @@ public void addSoundLevelListener(SoundLevelListener listener)
// TODO Auto-generated method stub
}
/**
* Determines whether this <tt>AudioMediaStream</tt> is set to transmit
* silence instead of the audio being fed from its <tt>MediaDevice</tt>.
*
* @return <tt>true</tt> if this <tt>AudioMediaStream</tt> is set to
* transmit silence instead of the audio fed from its <tt>MediaDevice</tt>;
* <tt>false</tt>, otherwise
* @see AudioMediaStream#isMute()
*/
public boolean isMute()
{
return ((AudioMediaDeviceImpl) getDevice()).isMute();
}
/**
* Registers {@link #CUSTOM_CODEC_FORMATS} with a specific
* <tt>RTPManager</tt>.
@ -194,38 +180,6 @@ public void removeSoundLevelListener(SoundLevelListener listener)
// TODO Auto-generated method stub
}
/**
* Sets the <tt>MediaDevice</tt> that this stream should use to play back
* and capture media. Asserts that the specified <tt>device</tt> is an
* <tt>AudioMediaDeviceImpl</tt> because the implementation of
* <tt>AudioMediaStreamImpl</tt> depends on it.
*
* @param device the <tt>MediaDevice</tt> that this stream should use to
* play back and capture media
* @see MediaStreamImpl#setDevice(MediaDevice)
*/
@Override
public void setDevice(MediaDevice device)
{
super.setDevice((AudioMediaDeviceImpl) device);
}
/**
* Causes this <tt>AudioMediaStream</tt> to stop transmitting the audio
* being fed from this stream's <tt>MediaDevice</tt> and transmit silence
* instead.
*
* @param mute <tt>true</tt> to have this <tt>AudioMediaStream</tt> transmit
* silence instead of the actual audio data that it captures from its
* <tt>MediaDevice</tt>; <tt>false</tt> to transmit actual audio data
* captured from the <tt>MediaDevice</tt> of this <tt>AudioMediaStream</tt>
* @see AudioMediaStream#setMute(boolean)
*/
public void setMute(boolean mute)
{
((AudioMediaDeviceImpl) getDevice()).setMute(mute);
}
/**
* Starts sending the specified <tt>DTMFTone</tt> until the
* <tt>stopSendingDTMF()</tt> method is called. Callers should keep in mind

@ -274,8 +274,6 @@ public List<MediaDevice> getDevices(MediaType mediaType)
switch (mediaType)
{
case AUDIO:
device = new AudioMediaDeviceImpl(captureDeviceInfo);
break;
case VIDEO:
device = new MediaDeviceImpl(captureDeviceInfo, mediaType);
break;
@ -363,7 +361,7 @@ public MediaFormatFactory getFormatFactory()
private MediaDevice getNonSendAudioDevice()
{
if (nonSendAudioDevice == null)
nonSendAudioDevice = new AudioMediaDeviceImpl();
nonSendAudioDevice = new MediaDeviceImpl(MediaType.AUDIO);
return nonSendAudioDevice;
}
@ -392,6 +390,7 @@ void start()
deviceConfiguration.initialize();
encodingConfiguration.initializeFormatPreferences();
encodingConfiguration.registerCustomPackages();
encodingConfiguration.registerCustomCodecs();
}
/**

@ -55,6 +55,13 @@ public class MediaStreamImpl
*/
private MediaDeviceSession deviceSession;
/**
* The <tt>PropertyChangeListener</tt> which listens to
* {@link #deviceSession} for changes in the value of its
* {@link MediaDeviceSession#OUTPUT_DATA_SOURCE} property.
*/
private PropertyChangeListener deviceSessionPropertyChangeListener;
/**
* The <tt>MediaDirection</tt> in which this <tt>MediaStream</tt> is allowed
* to stream media.
@ -66,8 +73,8 @@ public class MediaStreamImpl
* <tt>RTPManager</tt> it utilizes of (dynamic) RTP payload types to
* <tt>MediaFormat</tt>s.
*/
private final Map<Integer, MediaFormat> dynamicRTPPayloadTypes
= new HashMap<Integer, MediaFormat>();
private final Map<Byte, MediaFormat> dynamicRTPPayloadTypes
= new HashMap<Byte, MediaFormat>();
/**
* The <tt>ReceiveStream</tt>s this instance plays back on its associated
@ -148,7 +155,7 @@ public MediaStreamImpl(StreamConnector connector, MediaDevice device)
* @see MediaStream#addDynamicRTPPayloadType(int, MediaFormat)
*/
public void addDynamicRTPPayloadType(
int rtpPayloadType,
byte rtpPayloadType,
MediaFormat format)
{
MediaFormatImpl<? extends Format> mediaFormatImpl
@ -156,7 +163,7 @@ public void addDynamicRTPPayloadType(
synchronized (dynamicRTPPayloadTypes)
{
dynamicRTPPayloadTypes.put(Integer.valueOf(rtpPayloadType), format);
dynamicRTPPayloadTypes.put(Byte.valueOf(rtpPayloadType), format);
if (rtpManager != null)
rtpManager
@ -206,7 +213,8 @@ private void closeSendStreams()
private void createSendStreams()
{
RTPManager rtpManager = getRTPManager();
DataSource dataSource = getDeviceSession().getOutputDataSource();
MediaDeviceSession deviceSession = getDeviceSession();
DataSource dataSource = deviceSession.getOutputDataSource();
int streamCount;
if (dataSource instanceof PushBufferDataSource)
@ -247,6 +255,13 @@ else if (dataSource instanceof PullDataSource)
try
{
rtpManager.createSendStream(dataSource, streamIndex);
if (logger.isTraceEnabled())
logger
.trace(
"Created send stream for data source "
+ dataSource
+ " and stream index "
+ streamIndex);
}
catch (IOException ioe)
{
@ -270,6 +285,19 @@ else if (dataSource instanceof PullDataSource)
}
}
sendStreamsAreCreated = true;
if (deviceSessionPropertyChangeListener == null)
deviceSessionPropertyChangeListener = new PropertyChangeListener()
{
public void propertyChange(PropertyChangeEvent event)
{
if (MediaDeviceSession
.OUTPUT_DATA_SOURCE.equals(event.getPropertyName()))
deviceSessionOutputDataSourceChanged();
}
};
deviceSession
.addPropertyChangeListener(deviceSessionPropertyChangeListener);
}
/**
@ -288,12 +316,19 @@ protected void deviceSessionChanged(
MediaDeviceSession oldValue,
MediaDeviceSession newValue)
{
if (sendStreamsAreCreated)
{
closeSendStreams();
if ((newValue != null) && (rtpManager != null))
createSendStreams();
}
recreateSendStreams();
}
/**
* Notifies this instance that the output <tt>DataSource</tt> of its
* <tt>MediaDeviceSession</tt> has changed. Recreates the
* <tt>SendStream</tt>s of this instance as necessary so that it, for
* example, continues streaming after the change if it was streaming before
* the change.
*/
private void deviceSessionOutputDataSourceChanged()
{
recreateSendStreams();
}
/**
@ -350,20 +385,20 @@ public MediaDirection getDirection()
* well-known associations reported by
* {@link MediaFormat#getRTPPayloadType()}.
*
* @return a <tt>Map</tt> of RTP payload type expressed as <tt>Integer</tt>
* to <tt>MediaFormat</tt> describing the existing (dynamic) associations in
* @return a <tt>Map</tt> of RTP payload type expressed as <tt>Byte</tt> to
* <tt>MediaFormat</tt> describing the existing (dynamic) associations in
* this instance of RTP payload types to <tt>MediaFormat</tt>s. The
* <tt>Map</tt> represents a snapshot of the existing associations at the
* time of the <tt>getDynamicRTPPayloadTypes()</tt> method call and
* modifications to it are not reflected on the internal storage
* @see MediaStream#getDynamicRTPPayloadTypes()
*/
public Map<Integer, MediaFormat> getDynamicRTPPayloadTypes()
public Map<Byte, MediaFormat> getDynamicRTPPayloadTypes()
{
synchronized (dynamicRTPPayloadTypes)
{
return
new HashMap<Integer, MediaFormat>(dynamicRTPPayloadTypes);
new HashMap<Byte, MediaFormat>(dynamicRTPPayloadTypes);
}
}
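
A hedged caller-side sketch of the narrowed payload-type API (the value 101, the stream and format parameters, and the import locations are illustrative assumptions): RTP payload types are 7-bit values, with 96-127 conventionally used for dynamic assignments, so byte is wide enough to carry them.

import java.util.*;
import net.java.sip.communicator.service.neomedia.*;
import net.java.sip.communicator.service.neomedia.format.*;

// Hedged sketch: register a dynamic RTP payload type and read back the mapping.
static void mapDynamicPayloadType(MediaStream stream, MediaFormat format)
{
    byte dynamicPT = 101;                     // illustrative value, e.g. negotiated in SDP
    stream.addDynamicRTPPayloadType(dynamicPT, format);

    // The returned Map is a snapshot copy; editing it does not affect the stream.
    Map<Byte, MediaFormat> snapshot = stream.getDynamicRTPPayloadTypes();
    snapshot.remove(Byte.valueOf(dynamicPT)); // the stream's own mapping is unchanged
}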
@ -511,7 +546,12 @@ private RTPManager getRTPManager()
*/
public boolean isMute()
{
return false;
MediaDevice device = getDevice();
return
(device instanceof MediaDeviceImpl)
? ((MediaDeviceImpl) device).isMute()
: false;
}
/**
@ -529,6 +569,29 @@ public boolean isStarted()
return started;
}
/**
* Recreates the <tt>SendStream</tt>s of this instance (i.e. of its
* <tt>RTPManager</tt>) as necessary. For example, if there was no attempt
* to create the <tt>SendStream</tt>s prior to the call, does nothing. If
* they were created prior to the call, closes them and creates them again.
* If they were not started prior to the call, does not start them after
* recreating them.
*/
private void recreateSendStreams()
{
if (sendStreamsAreCreated)
{
closeSendStreams();
if ((getDeviceSession() != null) && (rtpManager != null))
{
createSendStreams();
if (MediaDirection.SENDONLY.equals(startedDirection)
|| MediaDirection.SENDRECV.equals(startedDirection))
startSendStreams();
}
}
}
/**
* Registers any custom JMF <tt>Format</tt>s with a specific
* <tt>RTPManager</tt>. Extenders should override in order to register their
@ -544,7 +607,7 @@ protected void registerCustomCodecFormats(RTPManager rtpManager)
{
synchronized (dynamicRTPPayloadTypes)
{
for (Map.Entry<Integer, MediaFormat> dynamicRTPPayloadType
for (Map.Entry<Byte, MediaFormat> dynamicRTPPayloadType
: dynamicRTPPayloadTypes.entrySet())
{
MediaFormatImpl<? extends Format> mediaFormatImpl
@ -577,6 +640,7 @@ public void setDevice(MediaDevice device)
if (device == null)
throw new NullPointerException("device");
// Require AbstractMediaDevice for MediaDeviceSession support.
AbstractMediaDevice abstractMediaDevice = (AbstractMediaDevice) device;
if ((deviceSession == null) || (deviceSession.getDevice() != device))
@ -585,6 +649,10 @@ public void setDevice(MediaDevice device)
if (deviceSession != null)
{
if (deviceSessionPropertyChangeListener != null)
deviceSession
.removePropertyChangeListener(
deviceSessionPropertyChangeListener);
deviceSession.close();
deviceSession = null;
}
@ -691,6 +759,12 @@ public void setFormat(MediaFormat format)
*/
public void setMute(boolean mute)
{
MediaDevice device = getDevice();
if (device instanceof MediaDeviceImpl)
((MediaDeviceImpl) device).setMute(mute);
else
throw new IllegalStateException("device");
}
/**
@ -764,22 +838,7 @@ private void start(MediaDirection direction)
&& (!MediaDirection.SENDRECV.equals(startedDirection)
&& !MediaDirection.SENDONLY.equals(startedDirection)))
{
RTPManager rtpManager = getRTPManager();
Iterable<SendStream> sendStreams = rtpManager.getSendStreams();
if (sendStreams != null)
for (SendStream sendStream : sendStreams)
try
{
// TODO Are we sure we want to connect here?
sendStream.getDataSource().connect();
sendStream.start();
}
catch (IOException ioe)
{
logger
.warn("Failed to start stream " + sendStream, ioe);
}
startSendStreams();
getDeviceSession().start(MediaDirection.SENDONLY);
@ -844,6 +903,31 @@ else if (startedDirection == null)
}
}
/**
* Starts the <tt>SendStream</tt>s of the <tt>RTPManager</tt> of this
* <tt>MediaStream</tt>.
*/
private void startSendStreams()
{
RTPManager rtpManager = getRTPManager();
@SuppressWarnings("unchecked")
Iterable<SendStream> sendStreams = rtpManager.getSendStreams();
if (sendStreams != null)
for (SendStream sendStream : sendStreams)
try
{
// TODO Are we sure we want to connect here?
sendStream.getDataSource().connect();
sendStream.start();
}
catch (IOException ioe)
{
logger
.warn("Failed to start stream " + sendStream, ioe);
}
}
/**
* Stops all streaming and capturing in this <tt>MediaStream</tt> and closes
* and releases all open/allocated devices/resources. Has no effect if this

@ -15,30 +15,43 @@
*
* @author Emil Ivov
* @author Ken Larson
* @author Lubomir Marinov
*/
public class ProcessorUtility implements ControllerListener
{
private final Logger logger = Logger.getLogger(ProcessorUtility.class);
/**
* The object that we use for syncing when waiting for a processor
* to enter a specific state.
* The <tt>Logger</tt> used by the <tt>ProcessorUtility</tt> class and its
* instances for logging output.
*/
private static final Logger logger
= Logger.getLogger(ProcessorUtility.class);
/**
* The <tt>Object</tt> used for syncing when waiting for a processor to
* enter a specific state.
*/
private final Object stateLock = new Object();
/**
* The indicator which determines whether the waiting of this instance on a
* processor for it to enter a specific state has failed.
*/
private boolean failed = false;
/**
* Default constructor, creates an instance of the of the Processor utility.
* Initializes a new <tt>ProcessorUtility</tt> instance.
*/
public ProcessorUtility()
{
}
/**
* Returns the object that we use for syncing when waiting for a processor
* Gets the <tt>Object</tt> to use for syncing when waiting for a processor
* to enter a specific state.
* @return Integer
*
* @return the <tt>Object</tt> to use for syncing when waiting for a
* processor to enter a specific state
*/
private Object getStateLock()
{
@ -125,11 +138,20 @@ else if (state == Processor.Realized)
}
catch (InterruptedException ie)
{
logger
.warn(
"Failed while waiting on Processor "
+ processor
+ " for state "
+ state,
ie);
processor.removeControllerListener(this);
return false;
}
}
}
processor.removeControllerListener(this);
return !failed;
}
}
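
A hedged usage sketch of ProcessorUtility (the boolean-returning waitForState(Processor, int) signature is assumed from its call sites; captureDataSource and logger are placeholders):

// waitForState itself asks the Processor to move toward the requested state
// (note the "else if (state == Processor.Realized)" branch above) and blocks
// until the state is reached or the wait fails.
Processor processor = Manager.createProcessor(captureDataSource);
if (!new ProcessorUtility().waitForState(processor, Processor.Configured))
    logger.warn("Processor " + processor + " never reached Configured");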

@ -1,118 +0,0 @@
/*
* SIP Communicator, the OpenSource Java VoIP and Instant Messaging client.
*
* Distributable under LGPL license.
* See terms of license at gnu.org.
*/
package net.java.sip.communicator.impl.neomedia.device;
import javax.media.*;
import javax.media.protocol.*;
import net.java.sip.communicator.impl.neomedia.*;
import net.java.sip.communicator.service.neomedia.*;
/**
* Extends <tt>MediaDeviceImpl</tt> with audio-specific functionality.
*
* @author Lubomir Marinov
*/
public class AudioMediaDeviceImpl
extends MediaDeviceImpl
{
/**
* Initializes a new <tt>AudioMediaDeviceImpl</tt> instance with
* <tt>MediaDirection</tt> which does not allow sending i.e. the new
* instance cannot be used to capture audio.
*/
public AudioMediaDeviceImpl()
{
super(MediaType.AUDIO);
}
/**
* Initializes a new <tt>AudioMediaDeviceImpl</tt> instance which is to
* provide an implementation of <tt>MediaDevice</tt> for a specific audio
* <tt>CaptureDevice</tt>.
*
* @param captureDevice the audio <tt>CaptureDevice</tt> the new instance is
* to provide an implementation of <tt>MediaDevice</tt> for
*/
public AudioMediaDeviceImpl(CaptureDevice captureDevice)
{
super(captureDevice, MediaType.AUDIO);
}
/**
* Initializes a new <tt>AudioMediaDeviceImpl</tt> instance which is to
* provide an implementation of <tt>MediaDevice</tt> for an audio
* <tt>CaptureDevice</tt> with a specific <tt>CaptureDeviceInfo</tt>.
*
* @param captureDeviceInfo the <tt>CaptureDeviceInfo</tt> of the audio
* <tt>CaptureDevice</tt> the new instance is to provide an implementation
* of <tt>MediaDevice</tt> for
*/
public AudioMediaDeviceImpl(CaptureDeviceInfo captureDeviceInfo)
{
super(captureDeviceInfo, MediaType.AUDIO);
}
/**
* Determines whether this <tt>MediaDevice</tt> will provide silence instead
* of actual captured data next time it is read.
*
* @return <tt>true</tt> if this <tt>MediaDevice</tt> will provide silence
* instead of actual captured data next time it is read; <tt>false</tt>,
* otherwise
*/
public boolean isMute()
{
CaptureDevice captureDevice = getCaptureDevice(false);
if (captureDevice instanceof MutePushBufferDataSource)
return ((MutePushBufferDataSource) captureDevice).isMute();
/*
* If there is no underlying CaptureDevice, this instance is mute
* because it cannot capture any audio.
*/
return !getDirection().allowsSending();
}
/**
* Sets the JMF <tt>CaptureDevice</tt> this instance wraps and provides a
* <tt>MediaDevice</tt> implementation for. Tries to enable muting.
*
* @param captureDevice the JMF <tt>CaptureDevice</tt> this instance is to
* wrap and provide a <tt>MediaDevice</tt> implementation for
* @see MediaDeviceImpl#setCaptureDevice(CaptureDevice)
*/
@Override
protected void setCaptureDevice(CaptureDevice captureDevice)
{
if (captureDevice instanceof PushBufferDataSource)
captureDevice
= new MutePushBufferDataSource(
(PushBufferDataSource) captureDevice);
super.setCaptureDevice(captureDevice);
}
/**
* Sets the indicator which determines whether this <tt>MediaDevice</tt>
* will start providing silence instead of actual captured data next time it
* is read.
*
* @param mute <tt>true</tt> to have this <tt>MediaDevice</tt> start
* providing silence instead of actual captured data next time it is read;
* otherwise, <tt>false</tt>
*/
public void setMute(boolean mute)
{
CaptureDevice captureDevice = getCaptureDevice();
if (captureDevice instanceof MutePushBufferDataSource)
((MutePushBufferDataSource) captureDevice).setMute(mute);
}
}

@ -33,21 +33,23 @@ public class AudioMixerMediaDevice
private AudioMixer audioMixer;
/**
* The actual <tt>AudioMediaDeviceImpl</tt> wrapped by this instance for the
* The actual <tt>MediaDeviceImpl</tt> wrapped by this instance for the
* purposes of audio mixing and used by {@link #audioMixer} as its
* <tt>CaptureDevice</tt>.
*/
private final AudioMediaDeviceImpl device;
private final MediaDeviceImpl device;
/**
* Initializes a new <tt>AudioMixerMediaDevice</tt> instance which is to
* enable audio mixing on a specific <tt>AudioMediaDeviceImpl</tt>.
* enable audio mixing on a specific <tt>MediaDeviceImpl</tt>.
*
* @param device the <tt>AudioMediaDeviceImpl</tt> which the new instance is
* to enable audio mixing on
* @param device the <tt>MediaDeviceImpl</tt> which the new instance is to
* enable audio mixing on
*/
public AudioMixerMediaDevice(AudioMediaDeviceImpl device)
public AudioMixerMediaDevice(MediaDeviceImpl device)
{
if (!MediaType.AUDIO.equals(device.getMediaType()))
throw new IllegalArgumentException("device");
/*
* AudioMixer is initialized with a CaptureDevice so we have to be sure

@ -401,6 +401,28 @@ public List<MediaFormat> getSupportedFormats()
return supportedFormats;
}
/**
* Determines whether this <tt>MediaDevice</tt> will provide silence instead
* of actual captured data next time it is read.
*
* @return <tt>true</tt> if this <tt>MediaDevice</tt> will provide silence
* instead of actual captured data next time it is read; <tt>false</tt>,
* otherwise
*/
public boolean isMute()
{
CaptureDevice captureDevice = getCaptureDevice(false);
if (captureDevice instanceof MutePushBufferDataSource)
return ((MutePushBufferDataSource) captureDevice).isMute();
/*
* If there is no underlying CaptureDevice, this instance is mute
* because it cannot capture any media.
*/
return !getDirection().allowsSending();
}
/**
* Sets the JMF <tt>CaptureDevice</tt> this instance wraps and provides a
* <tt>MediaDevice</tt> implementation for. Allows extenders to override in
@ -412,6 +434,12 @@ public List<MediaFormat> getSupportedFormats()
*/
protected void setCaptureDevice(CaptureDevice captureDevice)
{
// Try to enable mute support on the specified CaptureDevice.
if (captureDevice instanceof PushBufferDataSource)
captureDevice
= new MutePushBufferDataSource(
(PushBufferDataSource) captureDevice);
if (this.captureDevice != captureDevice)
{
CaptureDevice oldValue = this.captureDevice;
@ -425,6 +453,23 @@ protected void setCaptureDevice(CaptureDevice captureDevice)
}
}
/**
* Sets the indicator which determines whether this <tt>MediaDevice</tt>
* will start providing silence instead of actual captured data next time it
* is read.
*
* @param mute <tt>true</tt> to have this <tt>MediaDevice</tt> start
* providing silence instead of actual captured data next time it is read;
* otherwise, <tt>false</tt>
*/
public void setMute(boolean mute)
{
CaptureDevice captureDevice = getCaptureDevice();
if (captureDevice instanceof MutePushBufferDataSource)
((MutePushBufferDataSource) captureDevice).setMute(mute);
}
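
For context, a hedged sketch of what the MutePushBufferDataSource wrapper relied on above is assumed to do on its read path; the real class is not part of this diff, so the zero-fill below is an illustration, not its actual source.

import java.io.IOException;
import java.util.Arrays;
import javax.media.Buffer;
import javax.media.protocol.PushBufferStream;

// Hedged sketch: when muted, keep the stream flowing but overwrite the payload
// with silence so the RTP timing of the send streams stays intact.
static void mutedRead(PushBufferStream wrapped, Buffer buffer, boolean mute)
    throws IOException
{
    wrapped.read(buffer);                         // delegate to the wrapped stream
    if (mute && (buffer.getData() instanceof byte[]))
        Arrays.fill(
            (byte[]) buffer.getData(),
            buffer.getOffset(),
            buffer.getOffset() + buffer.getLength(),
            (byte) 0);                            // 0 is silence for signed linear PCM
}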
/**
* Gets a human-readable <tt>String</tt> representation of this instance.
*

@ -30,7 +30,7 @@
* @author Lubomir Marinov
*/
public class MediaDeviceSession
implements ControllerListener
extends PropertyChangeNotifier
{
/**
@ -40,6 +40,14 @@ public class MediaDeviceSession
private static final Logger logger
= Logger.getLogger(MediaDeviceSession.class);
/**
* The name of the <tt>MediaDeviceSession</tt> instance property the value
* of which represents the output <tt>DataSource</tt> of the
* <tt>MediaDeviceSession</tt> instance which provides the captured (RTP)
* data to be sent by <tt>MediaStream</tt> to <tt>MediaStreamTarget</tt>.
*/
public static final String OUTPUT_DATA_SOURCE = "OUTPUT_DATA_SOURCE";
/**
* The JMF <tt>DataSource</tt> of {@link #device} through which this
* instance accesses the media captured by it.
@ -58,6 +66,19 @@ public class MediaDeviceSession
*/
private final AbstractMediaDevice device;
/**
* The last JMF <tt>Format</tt> set to this instance by a call to its
* {@link #setFormat(MediaFormat)} and to be set as the output format of
* {@link #processor}.
*/
private Format format;
/**
* The <tt>ControllerListener</tt> which listens to the <tt>Player</tt>
* instances in {@link #players} for <tt>ControllerEvent</tt>s.
*/
private ControllerListener playerControllerListener;
/**
* The <tt>Player</tt>s rendering <tt>ReceiveStream</tt>s on the
* <tt>MediaDevice</tt> represented by this instance. Associated with
@ -75,6 +96,32 @@ public class MediaDeviceSession
*/
private Processor processor;
/**
* The <tt>ControllerListener</tt> which listens to {@link #processor} for
* <tt>ControllerEvent</tt>s.
*/
private ControllerListener processorControllerListener;
/**
* The indicator which determines whether {@link #processor} has received
* a <tt>ControllerClosedEvent</tt> at an unexpected time in its execution.
* A value of <tt>false</tt> does not mean that <tt>processor</tt> exists
* or that it is not closed, it just means that if <tt>processor</tt> failed
* to be initialized or it received a <tt>ControllerClosedEvent</tt>, it was
* at an expected time of its execution and that the fact in question was
* reflected, for example, by setting <tt>processor</tt> to <tt>null</tt>.
* If there is no <tt>processorIsPrematurelyClosed</tt> field and
* <tt>processor</tt> is set to <tt>null</tt> or left existing after the
* receipt of <tt>ControllerClosedEvent</tt>, it will either lead to not
* firing a <tt>PropertyChangeEvent</tt> for <tt>OUTPUT_DATA_SOURCE</tt>
* when it has actually changed and, consequently, cause the
* <tt>SendStream</tt>s of <tt>MediaStreamImpl</tt> to not be recreated or
* it will be impossible to detect that <tt>processor</tt> cannot have its
* format set and will thus be left broken even for subsequent calls to
* {@link #setFormat(MediaFormat)}.
*/
private boolean processorIsPrematurelyClosed;
/**
* The <tt>ReceiveStream</tt>s rendered by this instance on its associated
* <tt>MediaDevice</tt>. Mapped to <tt>DataSource</tt> because extenders may
@ -170,7 +217,28 @@ protected void addReceiveStream(
exception);
else
{
player.addControllerListener(this);
if (playerControllerListener == null)
playerControllerListener = new ControllerListener()
{
/**
* Notifies this <tt>ControllerListener</tt> that
* the <tt>Controller</tt> which it is registered
* with has generated an event.
*
* @param event the <tt>ControllerEvent</tt>
* specifying the <tt>Controller</tt> which is the
* source of the event and the very type of the
* event
* @see ControllerListener#controllerUpdate(
* ControllerEvent)
*/
public void controllerUpdate(ControllerEvent event)
{
playerControllerUpdate(event);
}
};
player.addControllerListener(playerControllerListener);
player.realize();
players.put(receiveStreamDataSource, player);
@ -260,6 +328,45 @@ public void close()
{
disposePlayers();
disconnectCaptureDevice();
closeProcessor();
}
/**
* Makes sure {@link #processor} is closed.
*/
private void closeProcessor()
{
if (processor != null)
{
if (processorControllerListener != null)
processor.removeControllerListener(processorControllerListener);
processor.stop();
if (processor.getState() == Processor.Realized)
{
DataSource dataOutput = processor.getDataOutput();
if (dataOutput != null)
dataOutput.disconnect();
}
processor.deallocate();
processor.close();
processorIsPrematurelyClosed = false;
/*
* Once the processor uses the captureDevice, the captureDevice has
* to be reconnected on its next use.
*/
disconnectCaptureDevice();
}
}
/**
* Makes sure {@link #captureDevice} is disconnected.
*/
private void disconnectCaptureDevice()
{
if (captureDevice != null)
{
/*
@ -287,43 +394,6 @@ public void close()
captureDevice.disconnect();
captureDeviceIsConnected = false;
}
if (processor != null)
{
processor.stop();
if (processor.getState() == Processor.Realized)
{
DataSource dataOutput = processor.getDataOutput();
if (dataOutput != null)
dataOutput.disconnect();
}
processor.deallocate();
processor.close();
}
}
/**
* Notifies this <tt>ControllerListener</tt> that the <tt>Controller</tt>
* which it is registered with has generated an event.
*
* @param event the <tt>ControllerEvent</tt> specifying the
* <tt>Controller</tt> which is the source of the event and the very type of
* the event
* @see ControllerListener#controllerUpdate(ControllerEvent)
*/
public void controllerUpdate(ControllerEvent event)
{
if (event instanceof RealizeCompleteEvent)
{
Player player = (Player) event.getSourceController();
if (player != null)
{
player.start();
realizeComplete(player);
}
}
}
/**
@ -346,6 +416,8 @@ protected void disposePlayer(Player player)
break;
}
if (playerControllerListener != null)
player.removeControllerListener(playerControllerListener);
player.stop();
player.deallocate();
player.close();
@ -383,8 +455,15 @@ private static Format findFirstMatchingFormat(
Format format)
{
for (Format match : formats)
{
/*
* TODO Is the encoding enough? We've been explicitly told what
* format to use so it may be that its non-encoding attributes which
* have been specified are also necessary.
*/
if (match.isSameEncoding(format))
return match;
}
return null;
}
@ -468,7 +547,9 @@ public MediaFormat getFormat()
{
Processor processor = getProcessor();
if (processor != null)
if ((processor != null)
&& (this.processor == processor)
&& !processorIsPrematurelyClosed)
{
MediaType mediaType = getMediaType();
@ -580,33 +661,46 @@ private Processor getProcessor()
.error(
"Failed to create Processor for " + captureDevice,
exception);
else if (waitForState(processor, Processor.Configured))
else
{
try
if (processorControllerListener == null)
processorControllerListener = new ControllerListener()
{
/**
* Notifies this <tt>ControllerListener</tt> that
* the <tt>Controller</tt> which it is registered
* with has generated an event.
*
* @param event the <tt>ControllerEvent</tt>
* specifying the <tt>Controller</tt> which is the
* source of the event and the very type of the
* event
* @see ControllerListener#controllerUpdate(
* ControllerEvent)
*/
public void controllerUpdate(ControllerEvent event)
{
processorControllerUpdate(event);
}
};
processor
.addControllerListener(processorControllerListener);
if (waitForState(processor, Processor.Configured))
{
exception = null;
processor
.setContentDescriptor(
new ContentDescriptor(
ContentDescriptor.RAW_RTP));
this.processor = processor;
processorIsPrematurelyClosed = false;
}
catch (NotConfiguredError nce)
else
{
// TODO
exception = nce;
if (processorControllerListener != null)
processor
.removeControllerListener(
processorControllerListener);
processor = null;
}
if (exception != null)
logger
.error(
"Failed to set ContentDescriptor to Processor.",
exception);
else
this.processor = processor;
}
else
processor = null;
}
}
return processor;
@ -624,7 +718,9 @@ public List<MediaFormat> getSupportedFormats()
Processor processor = getProcessor();
Set<Format> supportedFormats = new HashSet<Format>();
if (processor != null)
if ((processor != null)
&& (this.processor == processor)
&& !processorIsPrematurelyClosed)
{
MediaType mediaType = getMediaType();
@ -656,6 +752,80 @@ public List<MediaFormat> getSupportedFormats()
return supportedMediaFormats;
}
/**
* Gets notified about <tt>ControllerEvent</tt>s generated by the
* <tt>Player</tt> instances in {@link #players}.
*
* @param event the <tt>ControllerEvent</tt> specifying the
* <tt>Controller</tt> which is the source of the event and the very type of
* the event
*/
private void playerControllerUpdate(ControllerEvent event)
{
if (event instanceof RealizeCompleteEvent)
{
Player player = (Player) event.getSourceController();
if (player != null)
{
player.start();
realizeComplete(player);
}
}
}
/**
* Gets notified about <tt>ControllerEvent</tt>s generated by
* {@link #processor}.
*
* @param event the <tt>ControllerEvent</tt> specifying the
* <tt>Controller</tt> which is the source of the event and the very type of
* the event
*/
private void processorControllerUpdate(ControllerEvent event)
{
if (event instanceof ConfigureCompleteEvent)
{
Processor processor = (Processor) event.getSourceController();
if (processor != null)
{
try
{
processor
.setContentDescriptor(
new ContentDescriptor(
ContentDescriptor.RAW_RTP));
}
catch (NotConfiguredError nce)
{
logger
.error(
"Failed to set ContentDescriptor to Processor.",
nce);
}
if (format != null)
setFormat(processor, format);
}
}
else if (event instanceof ControllerClosedEvent)
{
Processor processor = (Processor) event.getSourceController();
/*
* If everything goes according to plan, we should've removed the
* ControllerListener from the processor by now.
*/
logger.warn(event);
// TODO Should the access to processor be synchronized?
if ((processor != null) && (this.processor == processor))
processorIsPrematurelyClosed = true;
}
}
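
For orientation, a hedged, blocking JMF sketch of the flow that getProcessor and processorControllerUpdate drive asynchronously: create the Processor, reach Configured, set the RAW_RTP ContentDescriptor, realize, and expose the output DataSource. The polling loops stand in for the ControllerListener handling above.

import javax.media.*;
import javax.media.protocol.*;

// Hedged sketch of the Processor lifecycle (blocking form, standard JMF only).
static DataSource createRtpOutput(DataSource captureDevice)
    throws Exception
{
    Processor processor = Manager.createProcessor(captureDevice);

    processor.configure();
    while (processor.getState() < Processor.Configured)
        Thread.sleep(10);                         // naive poll; real code uses ControllerEvents

    processor.setContentDescriptor(
        new ContentDescriptor(ContentDescriptor.RAW_RTP));

    processor.realize();
    while (processor.getState() < Processor.Realized)
        Thread.sleep(10);

    // MediaStreamImpl would hand this to RTPManager.createSendStream and start it.
    return processor.getDataOutput();
}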
/**
* Notifies this instance that a specific <tt>Player</tt> of remote content
* has generated a <tt>RealizeCompleteEvent</tt>. Allows extenders to carry
@ -714,72 +884,162 @@ public void setFormat(MediaFormat format)
MediaFormatImpl<? extends Format> mediaFormatImpl
= (MediaFormatImpl<? extends Format>) format;
Processor processor = getProcessor();
this.format = mediaFormatImpl.getFormat();
/*
* If the processor is after Configured, setting a different format will
* silently fail. Recreate the processor in order to be able to set the
* different format.
*/
if (processor != null)
{
if ((processor.getState() < Processor.Configured)
&& !waitForState(processor, Processor.Configured))
{
// TODO
return;
}
int processorState = processor.getState();
if (processorState == Processor.Configured)
setFormat(processor, this.format);
else if (processorIsPrematurelyClosed
|| ((processorState > Processor.Configured)
&& !format.equals(getFormat())))
setProcessor(null);
}
}
for (TrackControl trackControl : processor.getTrackControls())
{
if (!trackControl.isEnabled())
continue;
/**
* Sets the JMF <tt>Format</tt> in which a specific <tt>Processor</tt> is to
* output media data.
*
* @param processor the <tt>Processor</tt> to set the output <tt>Format</tt>
* of
* @param format the JMF <tt>Format</tt> to set to <tt>processor</tt>
*/
private void setFormat(Processor processor, Format format)
{
TrackControl[] trackControls = processor.getTrackControls();
MediaType mediaType = getMediaType();
Format[] supportedFormats = trackControl.getSupportedFormats();
for (int trackIndex = 0;
trackIndex < trackControls.length;
trackIndex++)
{
TrackControl trackControl = trackControls[trackIndex];
if ((supportedFormats == null) || (supportedFormats.length < 1))
{
trackControl.setEnabled(false);
continue;
}
if (!trackControl.isEnabled())
continue;
Format supportedFormat = null;
Format[] supportedFormats = trackControl.getSupportedFormats();
switch (mediaType)
if ((supportedFormats == null) || (supportedFormats.length < 1))
{
trackControl.setEnabled(false);
continue;
}
Format supportedFormat = null;
switch (mediaType)
{
case AUDIO:
if (supportedFormats[0] instanceof AudioFormat)
{
case AUDIO:
if (supportedFormats[0] instanceof AudioFormat)
{
if (FMJConditionals.FORCE_AUDIO_FORMAT != null)
trackControl
.setFormat(FMJConditionals.FORCE_AUDIO_FORMAT);
else
{
supportedFormat
= findFirstMatchingFormat(
supportedFormats,
mediaFormatImpl.getFormat());
}
}
break;
case VIDEO:
if (supportedFormats[0] instanceof VideoFormat)
if (FMJConditionals.FORCE_AUDIO_FORMAT != null)
trackControl
.setFormat(FMJConditionals.FORCE_AUDIO_FORMAT);
else
{
supportedFormat
= findFirstMatchingFormat(
supportedFormats,
mediaFormatImpl.getFormat());
if (supportedFormat != null)
supportedFormat
= assertSize((VideoFormat) supportedFormat);
= findFirstMatchingFormat(supportedFormats, format);
/*
* We've failed to find a supported format so try to use
* whatever we've been told and, if it fails, the caller
* will at least know why.
*/
if (supportedFormat == null)
supportedFormat = format;
}
break;
}
break;
case VIDEO:
if (supportedFormats[0] instanceof VideoFormat)
{
supportedFormat
= findFirstMatchingFormat(supportedFormats, format);
/*
* We've failed to find a supported format so try to use
* whatever we've been told and, if it fails, the caller
* will at least know why.
*/
if (supportedFormat == null)
supportedFormat = format;
if (supportedFormat != null)
supportedFormat
= assertSize((VideoFormat) supportedFormat);
}
break;
}
if (supportedFormat == null)
trackControl.setEnabled(false);
else
trackControl.setFormat(supportedFormat);
if (supportedFormat == null)
trackControl.setEnabled(false);
else
{
Format setFormat = trackControl.setFormat(supportedFormat);
if (setFormat == null)
logger
.error(
"Failed to set format of track "
+ trackIndex
+ " to "
+ supportedFormat
+ ". Processor is in state "
+ processor.getState());
else if (setFormat != supportedFormat)
logger
.warn(
"Failed to change format of track "
+ trackIndex
+ " from "
+ setFormat
+ " to "
+ supportedFormat
+ ". Processor is in state "
+ processor.getState());
else if (logger.isTraceEnabled())
logger
.trace(
"Set format of track "
+ trackIndex
+ " to "
+ setFormat);
}
}
}
/**
* Sets the JMF <tt>Processor</tt> which is to transcode
* {@link #captureDevice} into the format of this instance.
*
* @param processor the JMF <tt>Processor</tt> which is to transcode
* {@link #captureDevice} into the format of this instance
*/
private void setProcessor(Processor processor)
{
if (this.processor != processor)
{
closeProcessor();
this.processor = processor;
/*
* Since the processor has changed, its output DataSource known to
* the public has also changed.
*/
firePropertyChange(OUTPUT_DATA_SOURCE, null, null);
}
}
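
The OUTPUT_DATA_SOURCE notification above pairs with the listener MediaStreamImpl registers in createSendStreams. A hedged illustration of the same pattern using the standard java.beans machinery (the project's PropertyChangeNotifier is assumed to expose an equivalent add/remove/fire API):

import java.beans.*;

// Hedged illustration: fire a named property change with null old/new values so
// listeners merely learn "the output DataSource is no longer the same" and can
// recreate whatever depends on it (here: the send streams).
class OutputDataSourceNotifier
{
    static final String OUTPUT_DATA_SOURCE = "OUTPUT_DATA_SOURCE";

    private final PropertyChangeSupport changeSupport
        = new PropertyChangeSupport(this);

    void addPropertyChangeListener(PropertyChangeListener listener)
    {
        changeSupport.addPropertyChangeListener(listener);
    }

    void outputDataSourceChanged()
    {
        // Both values null: PropertyChangeSupport still fires in this case.
        changeSupport.firePropertyChange(OUTPUT_DATA_SOURCE, null, null);
    }
}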
/**
* Starts the processing of media in this instance in a specific direction.
*
@ -822,8 +1082,8 @@ public void stop(MediaDirection direction)
if (MediaDirection.SENDRECV.equals(direction)
|| MediaDirection.SENDONLY.equals(direction))
if ((processor != null)
&& (processor.getState() == Processor.Started))
processor.start();
&& (processor.getState() > Processor.Configured))
processor.stop();
}
/**

@ -176,7 +176,7 @@ public interface MediaStream
* <tt>MediaStream</tt> with <tt>rtpPayloadType</tt>
*/
public void addDynamicRTPPayloadType(
int rtpPayloadType,
byte rtpPayloadType,
MediaFormat format);
/**
@ -187,14 +187,14 @@ public void addDynamicRTPPayloadType(
* well-known associations reported by
* {@link MediaFormat#getRTPPayloadType()}.
*
* @return a <tt>Map</tt> of RTP payload type expressed as <tt>Integer</tt>
* to <tt>MediaFormat</tt> describing the existing (dynamic) associations in
* @return a <tt>Map</tt> of RTP payload type expressed as <tt>Byte</tt> to
* <tt>MediaFormat</tt> describing the existing (dynamic) associations in
* this instance of RTP payload types to <tt>MediaFormat</tt>s. The
* <tt>Map</tt> represents a snapshot of the existing associations at the
* time of the <tt>getDynamicRTPPayloadTypes()</tt> method call and
* modifications to it are not reflected on the internal storage
*/
public Map<Integer, MediaFormat> getDynamicRTPPayloadTypes();
public Map<Byte, MediaFormat> getDynamicRTPPayloadTypes();
/**
* Sets the direction in which media in this <tt>MediaStream</tt> is to be
