Addresses a possible deadlock in video calls.

cusax-fix
Lyubomir Marinov 14 years ago
parent 95be6b19ee
commit e46f74f442

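For context, the deadlock this commit guards against has the usual shape: one thread fires listener notifications while holding a lock that a second thread needs, and that second thread is itself blocked on a lock the first one holds. A minimal, self-contained sketch of the pattern follows; the class and lock names are illustrative only and are not taken from this commit.

// Hypothetical illustration only; class and lock names are not from this
// repository. Thread A holds the UI-side lock and needs the media-side lock
// to deliver an event; thread B holds the media-side lock and needs the
// UI-side lock to dispose of a player. Neither can proceed.
public class DeadlockSketch
{
    private static final Object UI_LOCK = new Object();
    private static final Object MEDIA_LOCK = new Object();

    public static void main(String[] args)
    {
        Thread uiThread = new Thread(new Runnable()
        {
            public void run()
            {
                synchronized (UI_LOCK)
                {
                    pause();
                    synchronized (MEDIA_LOCK) { /* deliver VideoEvent */ }
                }
            }
        });
        Thread mediaThread = new Thread(new Runnable()
        {
            public void run()
            {
                synchronized (MEDIA_LOCK)
                {
                    pause();
                    synchronized (UI_LOCK) { /* dispose Player, fire VIDEO_REMOVED */ }
                }
            }
        });

        uiThread.start();
        mediaThread.start(); // with this timing, both threads block forever
    }

    private static void pause()
    {
        try { Thread.sleep(100); } catch (InterruptedException ie) { }
    }
}

The changes below attack both sides of that picture: locks are held for shorter spans (handleVideoEvent, disposePlayer) and VideoEvent delivery can be made asynchronous, so firing an event no longer has to run on, or wait for, a thread that holds unrelated locks.
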
@ -787,26 +787,27 @@ private void handleVideoEvent(final VideoEvent event)
logger.trace("UI video event received originated in: "
+ event.getOrigin() + " and is of type: " + event.getType());
synchronized (videoContainers)
if ((event != null) && !event.isConsumed())
{
if ((event != null) && !event.isConsumed())
{
Component video = event.getVisualComponent();
int origin = event.getOrigin();
Component video = event.getVisualComponent();
synchronized (videoContainers)
{
switch (event.getType())
{
case VideoEvent.VIDEO_ADDED:
if(event.getOrigin() == VideoEvent.LOCAL)
if (origin == VideoEvent.LOCAL)
{
this.localVideo = video;
this.closeButton = new CloseButton();
}
else if(event.getOrigin() == VideoEvent.REMOTE)
else if (origin == VideoEvent.REMOTE)
{
this.remoteVideo = video;
}
addMouseListeners(event.getOrigin());
addMouseListeners(origin);
/*
* Let the creator of the local visual Component know it
@ -816,16 +817,18 @@ else if(event.getOrigin() == VideoEvent.REMOTE)
break;
case VideoEvent.VIDEO_REMOVED:
if (event.getOrigin() == VideoEvent.LOCAL &&
localVideo == video)
if (origin == VideoEvent.LOCAL)
{
this.localVideo = null;
this.closeButton = null;
if (localVideo == video)
{
this.localVideo = null;
this.closeButton = null;
}
}
else if(event.getOrigin() == VideoEvent.REMOTE &&
remoteVideo == video)
else if (origin == VideoEvent.REMOTE)
{
this.remoteVideo = null;
if (remoteVideo == video)
this.remoteVideo = null;
}
break;
}

@ -272,7 +272,7 @@ public MediaDevice getDefaultDevice(
break;
case VIDEO:
captureDeviceInfo
= getDeviceConfiguration().getVideoCaptureDevice(useCase);
= getDeviceConfiguration().getVideoCaptureDevice(useCase);
break;
default:
captureDeviceInfo = null;
@ -314,10 +314,12 @@ public MediaDevice getDefaultDevice(
}
}
//Don't use the device in case the user has disabled all codecs for that
//kind of media.
/*
* Don't use the device in case the user has disabled all codecs for
* that kind of media.
*/
if ((defaultDevice != null)
&& (defaultDevice.getSupportedFormats().isEmpty()))
&& (defaultDevice.getSupportedFormats().isEmpty()))
{
defaultDevice = null;
}

@ -1392,7 +1392,7 @@ public void setDevice(MediaDevice device)
deviceSessionPropertyChangeListener);
// keep player active
deviceSession.setDisposePlayerWhenClose(false);
deviceSession.setDisposePlayerOnClose(false);
deviceSession.close();
deviceSession = null;
}

@ -236,9 +236,18 @@ public int compare(FormatInfo info0, FormatInfo info1)
/**
* The facility which aids this instance in managing a list of
* <tt>VideoListener</tt>s and firing <tt>VideoEvent</tt>s to them.
* <p>
* Since the <tt>videoNotifierSupport</tt> of this
* <tt>VideoMediaStreamImpl</tt> just forwards the <tt>VideoEvent</tt>s of
* the associated <tt>VideoMediaDeviceSession</tt> at the time of this
* writing, it does not make sense to have <tt>videoNotifierSupport</tt>
* executing asynchronously because it does not know whether it has to wait
* for the delivery of the <tt>VideoEvent</tt>s and thus it has to default
* to waiting anyway.
* </p>
*/
private final VideoNotifierSupport videoNotifierSupport
= new VideoNotifierSupport(this);
= new VideoNotifierSupport(this, true);
/**
* Initializes a new <tt>VideoMediaStreamImpl</tt> instance which will use
@ -408,7 +417,8 @@ public void videoAdded(VideoEvent e)
if (fireVideoEvent(
e.getType(),
e.getVisualComponent(),
e.getOrigin()))
e.getOrigin(),
true))
e.consume();
}
@ -429,7 +439,7 @@ public void videoRemoved(VideoEvent e)
public void videoUpdate(VideoEvent e)
{
fireVideoEvent(e);
fireVideoEvent(e, true);
}
};
}
@ -480,6 +490,9 @@ public void disposeLocalVisualComponent(Component component)
* @param origin {@link VideoEvent#LOCAL} if the origin of the video is
* local (e.g. it is being locally captured); {@link VideoEvent#REMOTE} if
* the origin of the video is remote (e.g. a remote peer is streaming it)
* @param wait <tt>true</tt> if the call is to wait till the specified
* <tt>VideoEvent</tt> has been delivered to the <tt>VideoListener</tt>s;
* otherwise, <tt>false</tt>
* @return <tt>true</tt> if this event and, more specifically, the visual
* <tt>Component</tt> it describes have been consumed and should be
* considered owned, referenced (which is important because
@ -487,9 +500,8 @@ public void disposeLocalVisualComponent(Component component)
* otherwise, <tt>false</tt>
*/
protected boolean fireVideoEvent(
int type,
Component visualComponent,
int origin)
int type, Component visualComponent, int origin,
boolean wait)
{
if (logger.isTraceEnabled())
logger
@ -500,7 +512,9 @@ protected boolean fireVideoEvent(
+ VideoEvent.originToString(origin));
return
videoNotifierSupport.fireVideoEvent(type, visualComponent, origin);
videoNotifierSupport.fireVideoEvent(
type, visualComponent, origin,
wait);
}
/**
@ -509,10 +523,13 @@ protected boolean fireVideoEvent(
*
* @param event the <tt>VideoEvent</tt> to be fired to the
* <tt>VideoListener</tt>s registered with this instance
* @param wait <tt>true</tt> if the call is to wait till the specified
* <tt>VideoEvent</tt> has been delivered to the <tt>VideoListener</tt>s;
* otherwise, <tt>false</tt>
*/
protected void fireVideoEvent(VideoEvent event)
protected void fireVideoEvent(VideoEvent event, boolean wait)
{
videoNotifierSupport.fireVideoEvent(event);
videoNotifierSupport.fireVideoEvent(event, wait);
}
/**

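The wait flag matters most where the caller acts on the returned consumed state: videoAdded above fires with wait = true so that e.consume() can be driven by what the listeners actually did, and VideoMediaDeviceSession below does the same for VIDEO_ADDED before calling localVisualComponentConsumed. A rough, self-contained sketch of that contract follows, assuming VideoListener declares just the three callbacks used in the anonymous class above; the class name, Canvas stand-in and source object are placeholders.

import java.awt.Canvas;
import java.awt.Component;

import net.java.sip.communicator.service.neomedia.event.*;

public class WaitFlagSketch
{
    public static void main(String[] args)
    {
        Object source = new Object();
        Component video = new Canvas(); // placeholder visual Component

        VideoNotifierSupport support = new VideoNotifierSupport(source, false);

        support.addVideoListener(new VideoListener()
        {
            public void videoAdded(VideoEvent event) { event.consume(); }
            public void videoRemoved(VideoEvent event) { }
            public void videoUpdate(VideoEvent event) { }
        });

        // With wait == true the call blocks until the background thread has
        // delivered the event, so the returned consumed state reflects what
        // the listeners actually did with the visual Component.
        boolean consumed
            = support.fireVideoEvent(
                    VideoEvent.VIDEO_ADDED, video, VideoEvent.LOCAL, true);

        System.out.println("consumed = " + consumed);
    }
}
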
@ -76,37 +76,33 @@ public ImageStreamingAuto() throws Exception
for(ScreenDevice screen : screens)
{
Dimension size = screenSize != null ? screenSize : screen.getSize();
Format formats[]= new Format[]
{
new AVFrameFormat(
size,
Format.NOT_SPECIFIED,
FFmpeg.PIX_FMT_ARGB,
Format.NOT_SPECIFIED),
new RGBFormat(
size, // size
Format.NOT_SPECIFIED, // maxDataLength
Format.byteArray, // dataType
Format.NOT_SPECIFIED, // frameRate
32, // bitsPerPixel
2 /* red */, 3 /* green */, 4 /* blue */)
};
Format formats[]
= new Format[]
{
new AVFrameFormat(
size,
Format.NOT_SPECIFIED,
FFmpeg.PIX_FMT_ARGB,
Format.NOT_SPECIFIED),
new RGBFormat(
size, // size
Format.NOT_SPECIFIED, // maxDataLength
Format.byteArray, // dataType
Format.NOT_SPECIFIED, // frameRate
32, // bitsPerPixel
2 /* red */, 3 /* green */, 4 /* blue */)
};
CaptureDeviceInfo devInfo
= new CaptureDeviceInfo(
name + " " + i,
new MediaLocator(LOCATOR_PROTOCOL + ":" + i),
formats);
/* add to JMF device manager */
CaptureDeviceManager.addDevice(devInfo);
i++;
if(multipleMonitorOneScreen)
{
break;
}
}
CaptureDeviceManager.commit();

@ -174,9 +174,9 @@ public class MediaDeviceSession
private MediaDirection startedDirection = MediaDirection.INACTIVE;
/**
* If the player have to be disposed when we #close() this instance.
* If the player have to be disposed when we {@link #close()} this instance.
*/
private boolean disposePlayerWhenClose = true;
private boolean disposePlayerOnClose = true;
/**
* Whether output size has changed after latest processor config.
@ -204,9 +204,9 @@ protected MediaDeviceSession(AbstractMediaDevice device)
*
* @param dispose value to set
*/
public void setDisposePlayerWhenClose(boolean dispose)
public void setDisposePlayerOnClose(boolean dispose)
{
disposePlayerWhenClose = dispose;
disposePlayerOnClose = dispose;
}
/**
@ -337,11 +337,9 @@ public void close()
disconnectCaptureDevice();
closeProcessor();
if(disposePlayerWhenClose)
{
// playback
// playback
if (disposePlayerOnClose)
disposePlayer();
}
}
/**
@ -363,8 +361,16 @@ private void closeProcessor()
if (processor.getState() == Processor.Realized)
{
DataSource dataOutput = processor.getDataOutput();
DataSource dataOutput;
try
{
dataOutput = processor.getDataOutput();
}
catch (NotRealizedError nre)
{
dataOutput = null;
}
if (dataOutput != null)
dataOutput.disconnect();
}
@ -590,11 +596,23 @@ private void disconnectCaptureDevice()
*/
private void disposePlayer()
{
Player player;
synchronized (playbackSyncRoot)
{
if (player != null)
disposePlayer(player);
/*
* If #disposePlayer(Player) is just executed inside the
* synchronized block protected by #playbackSyncRoot, it practically
* locks the rest of the state protected by the same synchronization
* root. But that is not necessary because #disposePlayer(Player)
* will protect #player when necessary. Anyway, the change from the
* described behavior to the current one has been made while solving
* a deadlock.
*/
player = this.player;
}
if (player != null)
disposePlayer(player);
}
/**
@ -1815,7 +1833,7 @@ private static boolean waitForState(Processor processor, int state)
*/
protected void transferRenderingSession(MediaDeviceSession session)
{
if(session.disposePlayerWhenClose)
if (session.disposePlayerOnClose)
{
logger.error("Cannot transfer rendering session if " +
"MediaDeviceSession has closed it");

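The comment added to disposePlayer() describes the remedy in general terms: take the reference inside the synchronized block, but perform the disposal, which may fire events and acquire other locks, outside of it. A hedged, self-contained sketch of that pattern follows; Player here is a local stub, not the JMF javax.media.Player that MediaDeviceSession actually manages.

// Hypothetical sketch of the "copy under the lock, dispose outside" pattern.
public class PlayerHolder
{
    interface Player
    {
        void close();
    }

    private final Object playbackSyncRoot = new Object();

    private Player player;

    public void setPlayer(Player player)
    {
        synchronized (playbackSyncRoot)
        {
            this.player = player;
        }
    }

    public void disposePlayer()
    {
        Player player;

        synchronized (playbackSyncRoot)
        {
            // Copy the reference under the lock only. Closing the player may
            // fire events and acquire other locks, so it happens outside.
            player = this.player;
            this.player = null;
        }
        if (player != null)
            player.close();
    }
}

In the actual change the work is delegated to disposePlayer(Player), which, per the comment, protects #player itself when necessary, so the caller no longer needs to hold playbackSyncRoot on its behalf.
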
@ -115,7 +115,7 @@ public class VideoMediaDeviceSession
* <tt>VideoListener</tt>s and firing <tt>VideoEvent</tt>s to them.
*/
private final VideoNotifierSupport videoNotifierSupport
= new VideoNotifierSupport(this);
= new VideoNotifierSupport(this, false);
/**
* Initializes a new <tt>VideoMediaDeviceSession</tt> instance which is to
@ -291,9 +291,8 @@ protected void disposePlayer(Player player)
if (visualComponent != null)
{
fireVideoEvent(
VideoEvent.VIDEO_REMOVED,
visualComponent,
VideoEvent.REMOTE);
VideoEvent.VIDEO_REMOVED, visualComponent, VideoEvent.REMOTE,
false);
}
}
@ -309,6 +308,9 @@ protected void disposePlayer(Player player)
* @param origin {@link VideoEvent#LOCAL} if the origin of the video is
* local (e.g. it is being locally captured); {@link VideoEvent#REMOTE} if
* the origin of the video is remote (e.g. a remote peer is streaming it)
* @param wait <tt>true</tt> if the call is to wait till the specified
* <tt>VideoEvent</tt> has been delivered to the <tt>VideoListener</tt>s;
* otherwise, <tt>false</tt>
* @return <tt>true</tt> if this event and, more specifically, the visual
* <tt>Component</tt> it describes have been consumed and should be
* considered owned, referenced (which is important because
@ -316,9 +318,8 @@ protected void disposePlayer(Player player)
* otherwise, <tt>false</tt>
*/
protected boolean fireVideoEvent(
int type,
Component visualComponent,
int origin)
int type, Component visualComponent, int origin,
boolean wait)
{
if (logger.isTraceEnabled())
{
@ -330,7 +331,9 @@ protected boolean fireVideoEvent(
}
return
videoNotifierSupport.fireVideoEvent(type, visualComponent, origin);
videoNotifierSupport.fireVideoEvent(
type, visualComponent, origin,
wait);
}
/**
@ -339,10 +342,13 @@ protected boolean fireVideoEvent(
*
* @param videoEvent the <tt>VideoEvent</tt> to be fired to the
* <tt>VideoListener</tt>s registered with this instance
* @param wait <tt>true</tt> if the call is to wait till the specified
* <tt>VideoEvent</tt> has been delivered to the <tt>VideoListener</tt>s;
* otherwise, <tt>false</tt>
*/
protected void fireVideoEvent(VideoEvent videoEvent)
protected void fireVideoEvent(VideoEvent videoEvent, boolean wait)
{
videoNotifierSupport.fireVideoEvent(videoEvent);
videoNotifierSupport.fireVideoEvent(videoEvent, wait);
}
/**
@ -498,7 +504,8 @@ else if (controllerEvent instanceof RealizeCompleteEvent)
if (fireVideoEvent(
VideoEvent.VIDEO_ADDED,
visualComponent,
VideoEvent.LOCAL))
VideoEvent.LOCAL,
true))
{
localVisualComponentConsumed(visualComponent, player);
}
@ -648,7 +655,9 @@ public void paint(Graphics g)
*/
canvas.setName(DESKTOP_STREAMING_ICON);
fireVideoEvent(VideoEvent.VIDEO_ADDED, canvas, VideoEvent.LOCAL);
fireVideoEvent(
VideoEvent.VIDEO_ADDED, canvas, VideoEvent.LOCAL,
false);
}
return canvas;
}
@ -669,9 +678,8 @@ public void disposeLocalVisualComponent(Component component)
&& DESKTOP_STREAMING_ICON.equals(component.getName()))
{
fireVideoEvent(
VideoEvent.VIDEO_REMOVED,
component,
VideoEvent.LOCAL);
VideoEvent.VIDEO_REMOVED, component, VideoEvent.LOCAL,
false);
return;
}
@ -709,9 +717,8 @@ protected void disposeLocalPlayer(Player player)
if (visualComponent != null)
fireVideoEvent(
VideoEvent.VIDEO_REMOVED,
visualComponent,
VideoEvent.LOCAL);
VideoEvent.VIDEO_REMOVED, visualComponent, VideoEvent.LOCAL,
false);
}
/**
@ -942,9 +949,8 @@ public void componentResized(ComponentEvent e)
});
fireVideoEvent(
VideoEvent.VIDEO_ADDED,
visualComponent,
VideoEvent.REMOTE);
VideoEvent.VIDEO_ADDED, visualComponent, VideoEvent.REMOTE,
false);
}
}
@ -991,7 +997,8 @@ public void run()
visualComponent,
SizeChangeVideoEvent.REMOTE,
width,
height));
height),
false);
}
}
@ -1516,7 +1523,8 @@ protected void startedDirectionChanged(
fireVideoEvent(
VideoEvent.VIDEO_ADDED,
visualComponent,
VideoEvent.REMOTE);
VideoEvent.REMOTE,
false);
}
}
}
@ -1531,7 +1539,8 @@ else if (state > Processor.Configured)
fireVideoEvent(
VideoEvent.VIDEO_REMOVED,
visualComponent,
VideoEvent.REMOTE);
VideoEvent.REMOTE,
false);
}
}
}

@ -10,10 +10,11 @@ Import-Package: org.bouncycastle.crypto,
org.bouncycastle.crypto.macs,
org.bouncycastle.crypto.params,
org.bouncycastle.crypto.prng,
org.ice4j.socket,
org.json,
org.osgi.framework,
org.w3c.dom,
org.xml.sax,
org.ice4j.socket,
javax.imageio,
javax.sound.sampled,
javax.swing,
@ -21,6 +22,7 @@ Import-Package: org.bouncycastle.crypto,
javax.swing.event,
javax.swing.table,
javax.swing.text,
javax.xml.parsers,
net.java.sip.communicator.service.configuration,
net.java.sip.communicator.service.fileaccess,
net.java.sip.communicator.service.gui,

@ -474,9 +474,7 @@ protected void processActiveRequest(RequestEvent requestEvent,
byte[] rawContent)
{
if(requestEvent.getDialog() != callPeer.getDialog())
{
return;
}
if (rawContent != null)
{
@ -514,19 +512,14 @@ protected void processActiveRequest(RequestEvent requestEvent,
List<ComponentEvent> events = null;
Point p = getOrigin();
events = DesktopSharingProtocolSipImpl.parse(root, size,
p);
events = DesktopSharingProtocolSipImpl.parse(root, size, p);
for(ComponentEvent evt : events)
{
if(evt instanceof MouseEvent)
{
processMouseEvent((MouseEvent)evt);
}
else if(evt instanceof KeyEvent)
{
processKeyboardEvent((KeyEvent)evt);
}
}
}
}

@ -83,10 +83,12 @@ public Call createVideoCall(String uri, MediaDevice mediaDevice)
Address toAddress = parentProvider.parseAddressString(uri);
CallSipImpl call = basicTelephony.createOutgoingCall();
call.setVideoDevice(mediaDevice);
call.setLocalVideoAllowed(true, getMediaUseCase());
call.invite(toAddress, null);
origin = getOriginForMediaDevice(mediaDevice);
return call;
}
@ -119,6 +121,7 @@ public Call createVideoCall(Contact callee, MediaDevice mediaDevice)
}
CallSipImpl call = basicTelephony.createOutgoingCall();
call.setLocalVideoAllowed(true, getMediaUseCase());
call.setVideoDevice(mediaDevice);
call.invite(toAddress, null);
@ -146,9 +149,9 @@ public Call createVideoCall(Contact callee, MediaDevice mediaDevice)
public Call createVideoCall(String uri)
throws OperationFailedException, ParseException
{
Call call = super.createVideoCall(uri);
MediaDevice device = ((CallSipImpl)call).getDefaultDevice(
MediaType.VIDEO);
CallSipImpl call = (CallSipImpl) super.createVideoCall(uri);
MediaDevice device = call.getDefaultDevice(MediaType.VIDEO);
size = (((VideoMediaFormat)device.getFormat()).getSize());
origin = getOriginForMediaDevice(device);
return call;
@ -170,9 +173,9 @@ public Call createVideoCall(String uri)
@Override
public Call createVideoCall(Contact callee) throws OperationFailedException
{
Call call = super.createVideoCall(callee);
MediaDevice device = ((CallSipImpl)call).getDefaultDevice(
MediaType.VIDEO);
CallSipImpl call = (CallSipImpl) super.createVideoCall(callee);
MediaDevice device = call.getDefaultDevice(MediaType.VIDEO);
size = (((VideoMediaFormat)device.getFormat()).getSize());
origin = getOriginForMediaDevice(device);
return call;
@ -188,21 +191,24 @@ public Call createVideoCall(Contact callee) throws OperationFailedException
* @param allowed <tt>true</tt> if local video transmission is allowed and
* <tt>false</tt> otherwise.
*
* @throws OperationFailedException if video initialization fails.
* @throws OperationFailedException if video initialization fails.
*/
@Override
public void setLocalVideoAllowed(Call call, boolean allowed)
throws OperationFailedException
{
((CallSipImpl)call).setLocalVideoAllowed(allowed, MediaUseCase.DESKTOP);
((CallSipImpl)call).setVideoDevice(null);
MediaDevice device = ((CallSipImpl)call).getDefaultDevice(
MediaType.VIDEO);
size = (((VideoMediaFormat)device.getFormat()).getSize());
CallSipImpl callImpl = (CallSipImpl) call;
callImpl.setLocalVideoAllowed(allowed, MediaUseCase.DESKTOP);
callImpl.setVideoDevice(null);
MediaDevice device = callImpl.getDefaultDevice(MediaType.VIDEO);
size = ((VideoMediaFormat)device.getFormat()).getSize();
origin = getOriginForMediaDevice(device);
/* reinvite all peers */
((CallSipImpl)call).reInvite();
callImpl.reInvite();
}
/**
@ -267,13 +273,10 @@ public boolean isPartialStreaming(Call call)
CallSipImpl callImpl = (CallSipImpl)call;
MediaDevice device = callImpl.getDefaultDevice(MediaType.VIDEO);
if(device != null)
{
MediaService mediaService = SipActivator.getMediaService();
return mediaService.isPartialStreaming(device);
}
return false;
return
(device == null)
? false
: SipActivator.getMediaService().isPartialStreaming(device);
}
/**
@ -325,8 +328,8 @@ public Point getOrigin()
*/
protected static Point getOriginForMediaDevice(MediaDevice device)
{
MediaService mediaService = SipActivator.getMediaService();
return mediaService.getOriginForDesktopStreamingDevice(device);
return
SipActivator.getMediaService().getOriginForDesktopStreamingDevice(
device);
}
}

@ -6,18 +6,27 @@
*/
package net.java.sip.communicator.service.neomedia.event;
import java.awt.Component;
import java.awt.*;
import java.util.*;
import java.util.List; // disambiguation
/**
* Represents a mechanism to easily add to a specific <tt>Object</tt> by means
* of composition support for firing <tt>VideoEvent</tt>s to
* <tt>VideoListener</tt>s.
*
* @author Lubomir Marinov
* @author Lyubomir Marinov
*/
public class VideoNotifierSupport
{
private static final long THREAD_TIMEOUT = 5000;
/**
* The list of <tt>VideoEvent</tt>s which are to be delivered to the
* {@link #listeners} registered with this instance when
* {@link #synchronous} is equal to <tt>false</tt>.
*/
private final List<VideoEvent> events;
/**
* The list of <tt>VideoListener</tt>s interested in changes in the
@ -32,6 +41,18 @@ public class VideoNotifierSupport
*/
private final Object source;
/**
* The indicator which determines whether this instance delivers the
* <tt>VideoEvent</tt>s to the {@link #listeners} synchronously.
*/
private final boolean synchronous;
/**
* The <tt>Thread</tt> in which {@link #events} are delivered to the
* {@link #listeners} when {@link #synchronous} is equal to <tt>false</tt>.
*/
private Thread thread;
/**
* Initializes a new <tt>VideoNotifierSupport</tt> instance which is to
* facilitate the management of <tt>VideoListener</tt>s and firing
@ -41,8 +62,26 @@ public class VideoNotifierSupport
* of the <tt>VideoEvent</tt>s fired by the new instance
*/
public VideoNotifierSupport(Object source)
{
this(source, true);
}
/**
* Initializes a new <tt>VideoNotifierSupport</tt> instance which is to
* facilitate the management of <tt>VideoListener</tt>s and firing
* <tt>VideoEvent</tt>s to them for a specific <tt>Object</tt>.
*
* @param source the <tt>Object</tt> which is to be reported as the source
* of the <tt>VideoEvent</tt>s fired by the new instance
* @param synchronous <tt>true</tt> if the new instance is to deliver the
* <tt>VideoEvent</tt>s synchronously; otherwise, <tt>false</tt>
*/
public VideoNotifierSupport(Object source, boolean synchronous)
{
this.source = source;
this.synchronous = synchronous;
events = this.synchronous ? null : new LinkedList<VideoEvent>();
}
/**
@ -71,6 +110,32 @@ public void addVideoListener(VideoListener listener)
}
}
protected void doFireVideoEvent(VideoEvent event)
{
VideoListener[] listeners;
synchronized (this.listeners)
{
listeners
= this.listeners.toArray(
new VideoListener[this.listeners.size()]);
}
for (VideoListener listener : listeners)
switch (event.getType())
{
case VideoEvent.VIDEO_ADDED:
listener.videoAdded(event);
break;
case VideoEvent.VIDEO_REMOVED:
listener.videoRemoved(event);
break;
default:
listener.videoUpdate(event);
break;
}
}
/**
* Notifies the <tt>VideoListener</tt>s registered with this
* <tt>VideoMediaStream</tt> about a specific type of change in the
@ -83,6 +148,9 @@ public void addVideoListener(VideoListener listener)
* @param origin {@link VideoEvent#LOCAL} if the origin of the video is
* local (e.g. it is being locally captured); {@link VideoEvent#REMOTE} if
* the origin of the video is remote (e.g. a remote peer is streaming it)
* @param wait <tt>true</tt> if the call is to wait till the specified
* <tt>VideoEvent</tt> has been delivered to the <tt>VideoListener</tt>s;
* otherwise, <tt>false</tt>
* @return <tt>true</tt> if this event and, more specifically, the visual
* <tt>Component</tt> it describes have been consumed and should be
* considered owned, referenced (which is important because
@ -90,44 +158,14 @@ public void addVideoListener(VideoListener listener)
* otherwise, <tt>false</tt>
*/
public boolean fireVideoEvent(
int type,
Component visualComponent,
int origin)
int type, Component visualComponent, int origin,
boolean wait)
{
VideoListener[] listeners;
synchronized (this.listeners)
{
listeners
= this.listeners
.toArray(new VideoListener[this.listeners.size()]);
}
boolean consumed;
if (listeners.length > 0)
{
VideoEvent event
= new VideoEvent(source, type, visualComponent, origin);
for (VideoListener listener : listeners)
switch (type)
{
case VideoEvent.VIDEO_ADDED:
listener.videoAdded(event);
break;
case VideoEvent.VIDEO_REMOVED:
listener.videoRemoved(event);
break;
default:
throw new IllegalArgumentException("type");
}
VideoEvent event
= new VideoEvent(source, type, visualComponent, origin);
consumed = event.isConsumed();
}
else
consumed = false;
return consumed;
fireVideoEvent(event, wait);
return event.isConsumed();
}
/**
@ -136,31 +174,45 @@ public boolean fireVideoEvent(
*
* @param event the <tt>VideoEvent</tt> to be fired to the
* <tt>VideoListener</tt>s registered with this instance
* @param wait <tt>true</tt> if the call is to wait till the specified
* <tt>VideoEvent</tt> has been delivered to the <tt>VideoListener</tt>s;
* otherwise, <tt>false</tt>
*/
public void fireVideoEvent(VideoEvent event)
public void fireVideoEvent(VideoEvent event, boolean wait)
{
VideoListener[] listeners;
synchronized (this.listeners)
if (synchronous)
doFireVideoEvent(event);
else
{
listeners
= this.listeners
.toArray(new VideoListener[this.listeners.size()]);
}
for (VideoListener listener : listeners)
switch (event.getType())
synchronized (events)
{
case VideoEvent.VIDEO_ADDED:
listener.videoAdded(event);
break;
case VideoEvent.VIDEO_REMOVED:
listener.videoRemoved(event);
break;
default:
listener.videoUpdate(event);
break;
events.add(event);
if (thread == null)
startThread();
else
events.notify();
if (wait)
{
boolean interrupted = false;
while (events.contains(event) && (thread != null))
{
try
{
events.wait();
}
catch (InterruptedException ie)
{
interrupted = true;
}
}
if (interrupted)
Thread.currentThread().interrupt();
}
}
}
}
/**
@ -179,4 +231,97 @@ public void removeVideoListener(VideoListener listener)
listeners.remove(listener);
}
}
private void runInThread()
{
while (true)
{
VideoEvent event = null;
synchronized (events)
{
long emptyTime = -1;
boolean interrupted = false;
while (events.isEmpty())
{
if (emptyTime == -1)
emptyTime = System.currentTimeMillis();
else
{
long newEmptyTime = System.currentTimeMillis();
if ((newEmptyTime - emptyTime) >= THREAD_TIMEOUT)
{
events.notify();
return;
}
}
try
{
events.wait(THREAD_TIMEOUT);
}
catch (InterruptedException ie)
{
interrupted = true;
}
}
if (interrupted)
Thread.currentThread().interrupt();
event = events.remove(0);
}
if (event != null)
{
try
{
doFireVideoEvent(event);
}
catch (Throwable t)
{
if (t instanceof ThreadDeath)
throw (ThreadDeath) t;
}
synchronized (events)
{
events.notify();
}
}
}
}
private void startThread()
{
thread
= new Thread("VideoNotifierSupportThread")
{
@Override
public void run()
{
try
{
runInThread();
}
finally
{
synchronized (events)
{
if (Thread.currentThread().equals(thread))
{
thread = null;
if (events.isEmpty())
events.notify();
else
startThread();
}
}
}
}
};
thread.setDaemon(true);
thread.start();
}
}

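Putting the two modes of VideoNotifierSupport side by side: with synchronous == true (VideoMediaStreamImpl) listeners run on the calling thread, while with synchronous == false (VideoMediaDeviceSession) events are queued and delivered by a daemon thread that idles out after THREAD_TIMEOUT (5 seconds) of inactivity and is lazily restarted. A short usage sketch under those assumptions; the class name, source object and Canvas stand-in are placeholders.

import java.awt.Canvas;
import java.awt.Component;

import net.java.sip.communicator.service.neomedia.event.*;

public class DeliveryModeSketch
{
    public static void main(String[] args)
    {
        Object source = new Object();
        Component video = new Canvas(); // placeholder visual Component

        // Synchronous, as in VideoMediaStreamImpl: listeners run on the
        // calling thread, so the wait argument makes no practical difference.
        VideoNotifierSupport sync = new VideoNotifierSupport(source, true);
        sync.fireVideoEvent(
                VideoEvent.VIDEO_ADDED, video, VideoEvent.LOCAL, true);

        // Asynchronous, as in VideoMediaDeviceSession: the event is queued
        // and delivered by a daemon thread; wait == false returns immediately
        // even if media-related locks are still held by the caller.
        VideoNotifierSupport async = new VideoNotifierSupport(source, false);
        async.fireVideoEvent(
                VideoEvent.VIDEO_REMOVED, video, VideoEvent.REMOTE, false);
    }
}

Fire-and-forget (wait == false) is what the device session uses for VIDEO_REMOVED and size-change notifications, which is exactly where waiting on listener delivery while holding media locks would be risky.
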
@ -375,12 +375,14 @@ public void logAndFail(String message, Throwable throwable)
public void setLocalVideoAllowed(boolean allowed)
throws OperationFailedException
{
if(getMediaHandler().isLocalVideoTransmissionEnabled() == allowed)
return;
CallPeerMediaHandler<?> mediaHandler = getMediaHandler();
// Modify the local media setup to reflect the requested setting for
// the streaming of the local video.
getMediaHandler().setLocalVideoTransmissionEnabled(allowed);
if(mediaHandler.isLocalVideoTransmissionEnabled() != allowed)
{
// Modify the local media setup to reflect the requested setting for
// the streaming of the local video.
mediaHandler.setLocalVideoTransmissionEnabled(allowed);
}
}
/**
@ -509,14 +511,16 @@ public void setState(CallPeerState newState, String reason, int reasonCode)
// of CallPeerMediaHandler) we won't set and fire the current state
// to Disconnected. Before closing the mediaHandler is setting the state
// in order to deliver states as quick as possible.
synchronized(getMediaHandler())
CallPeerMediaHandler<?> mediaHandler = getMediaHandler();
synchronized(mediaHandler)
{
super.setState(newState, reason, reasonCode);
if (CallPeerState.DISCONNECTED.equals(newState)
|| CallPeerState.FAILED.equals(newState))
{
getMediaHandler().close();
mediaHandler.close();
}
}
}
@ -788,8 +792,10 @@ public void conferenceMemberAdded(CallPeerConferenceEvent conferenceEvent)
// us audio for at least two separate participants. We therefore
// need to remove the stream level listeners and switch to CSRC
// level listening
getMediaHandler().setStreamAudioLevelListener(null);
getMediaHandler().setCsrcAudioLevelListener(this);
CallPeerMediaHandler<?> mediaHandler = getMediaHandler();
mediaHandler.setStreamAudioLevelListener(null);
mediaHandler.setCsrcAudioLevelListener(this);
}
}
@ -811,8 +817,10 @@ public void conferenceMemberRemoved(CallPeerConferenceEvent conferenceEvent)
// since there's only us and her in the call. Lets stop being a CSRC
// listener and move back to listening the audio level of the
// stream itself.
getMediaHandler().setStreamAudioLevelListener(this);
getMediaHandler().setCsrcAudioLevelListener(null);
CallPeerMediaHandler<?> mediaHandler = getMediaHandler();
mediaHandler.setStreamAudioLevelListener(this);
mediaHandler.setCsrcAudioLevelListener(null);
}
}
