Addresses a possible deadlock in video calls.

cusax-fix
Lyubomir Marinov 14 years ago
parent 95be6b19ee
commit e46f74f442

@@ -787,26 +787,27 @@ private void handleVideoEvent(final VideoEvent event)
         logger.trace("UI video event received originated in: "
             + event.getOrigin() + " and is of type: " + event.getType());
 
-        synchronized (videoContainers)
+        if ((event != null) && !event.isConsumed())
         {
-            if ((event != null) && !event.isConsumed())
-            {
-                Component video = event.getVisualComponent();
+            int origin = event.getOrigin();
+            Component video = event.getVisualComponent();
 
+            synchronized (videoContainers)
+            {
                 switch (event.getType())
                 {
                 case VideoEvent.VIDEO_ADDED:
-                    if(event.getOrigin() == VideoEvent.LOCAL)
+                    if (origin == VideoEvent.LOCAL)
                     {
                         this.localVideo = video;
                         this.closeButton = new CloseButton();
                     }
-                    else if(event.getOrigin() == VideoEvent.REMOTE)
+                    else if (origin == VideoEvent.REMOTE)
                     {
                         this.remoteVideo = video;
                     }
 
-                    addMouseListeners(event.getOrigin());
+                    addMouseListeners(origin);
 
                     /*
                      * Let the creator of the local visual Component know it
@@ -816,16 +817,18 @@ else if(event.getOrigin() == VideoEvent.REMOTE
                     break;
                 case VideoEvent.VIDEO_REMOVED:
-                    if (event.getOrigin() == VideoEvent.LOCAL &&
-                            localVideo == video)
+                    if (origin == VideoEvent.LOCAL)
                     {
-                        this.localVideo = null;
-                        this.closeButton = null;
+                        if (localVideo == video)
+                        {
+                            this.localVideo = null;
+                            this.closeButton = null;
+                        }
                     }
-                    else if(event.getOrigin() == VideoEvent.REMOTE &&
-                            remoteVideo == video)
+                    else if (origin == VideoEvent.REMOTE)
                     {
-                        this.remoteVideo = null;
+                        if (remoteVideo == video)
+                            this.remoteVideo = null;
                     }
                     break;
                 }
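
The hunk above narrows the locking in handleVideoEvent: the event is checked and its data copied into locals before videoContainers is locked, so the lock is no longer held while the UI queries the event. The following sketch (not from this commit; the names are simplified stand-ins for the Jitsi classes) shows the same ordering:

    import java.util.ArrayList;
    import java.util.List;

    class VideoEventOrderingSketch
    {
        static final int LOCAL = 1, REMOTE = 2;

        private final List<Object> videoContainers = new ArrayList<Object>();
        private Object localVideo, remoteVideo;

        void handleVideoEvent(int origin, Object video, boolean consumed)
        {
            if (consumed)
                return;

            // Everything needed from the event is already in local variables,
            // so no callback runs while the lock below is held.
            synchronized (videoContainers)
            {
                if (origin == LOCAL)
                    localVideo = video;
                else if (origin == REMOTE)
                    remoteVideo = video;
            }
        }
    }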

@@ -272,7 +272,7 @@ public MediaDevice getDefaultDevice(
             break;
         case VIDEO:
             captureDeviceInfo
                 = getDeviceConfiguration().getVideoCaptureDevice(useCase);
             break;
         default:
             captureDeviceInfo = null;
@@ -314,10 +314,12 @@ public MediaDevice getDefaultDevice(
             }
         }
 
-        //Don't use the device in case the user has disabled all codecs for that
-        //kind of media.
+        /*
+         * Don't use the device in case the user has disabled all codecs for
+         * that kind of media.
+         */
         if ((defaultDevice != null)
                 && (defaultDevice.getSupportedFormats().isEmpty()))
         {
             defaultDevice = null;
         }

@@ -1392,7 +1392,7 @@ public void setDevice(MediaDevice device)
                 deviceSessionPropertyChangeListener);
 
             // keep player active
-            deviceSession.setDisposePlayerWhenClose(false);
+            deviceSession.setDisposePlayerOnClose(false);
             deviceSession.close();
             deviceSession = null;
         }

@@ -236,9 +236,18 @@ public int compare(FormatInfo info0, FormatInfo info1)
     /**
      * The facility which aids this instance in managing a list of
      * <tt>VideoListener</tt>s and firing <tt>VideoEvent</tt>s to them.
+     * <p>
+     * Since the <tt>videoNotifierSupport</tt> of this
+     * <tt>VideoMediaStreamImpl</tt> just forwards the <tt>VideoEvent</tt>s of
+     * the associated <tt>VideoMediaDeviceSession</tt> at the time of this
+     * writing, it does not make sense to have <tt>videoNotifierSupport</tt>
+     * executing asynchronously because it does not know whether it has to wait
+     * for the delivery of the <tt>VideoEvent</tt>s and thus it has to default
+     * to waiting anyway.
+     * </p>
      */
     private final VideoNotifierSupport videoNotifierSupport
-        = new VideoNotifierSupport(this);
+        = new VideoNotifierSupport(this, true);
 
     /**
      * Initializes a new <tt>VideoMediaStreamImpl</tt> instance which will use
@@ -408,7 +417,8 @@ public void videoAdded(VideoEvent e)
                 if (fireVideoEvent(
                         e.getType(),
                         e.getVisualComponent(),
-                        e.getOrigin()))
+                        e.getOrigin(),
+                        true))
                     e.consume();
             }
@@ -429,7 +439,7 @@ public void videoRemoved(VideoEvent e)
             public void videoUpdate(VideoEvent e)
             {
-                fireVideoEvent(e);
+                fireVideoEvent(e, true);
             }
         };
     }
@@ -480,6 +490,9 @@ public void disposeLocalVisualComponent(Component component)
      * @param origin {@link VideoEvent#LOCAL} if the origin of the video is
      * local (e.g. it is being locally captured); {@link VideoEvent#REMOTE} if
      * the origin of the video is remote (e.g. a remote peer is streaming it)
+     * @param wait <tt>true</tt> if the call is to wait till the specified
+     * <tt>VideoEvent</tt> has been delivered to the <tt>VideoListener</tt>s;
+     * otherwise, <tt>false</tt>
      * @return <tt>true</tt> if this event and, more specifically, the visual
      * <tt>Component</tt> it describes have been consumed and should be
      * considered owned, referenced (which is important because
@@ -487,9 +500,8 @@ public void disposeLocalVisualComponent(Component component)
      * otherwise, <tt>false</tt>
      */
     protected boolean fireVideoEvent(
-        int type,
-        Component visualComponent,
-        int origin)
+        int type, Component visualComponent, int origin,
+        boolean wait)
     {
         if (logger.isTraceEnabled())
             logger
@@ -500,7 +512,9 @@ protected boolean fireVideoEvent(
                     + VideoEvent.originToString(origin));
 
         return
-            videoNotifierSupport.fireVideoEvent(type, visualComponent, origin);
+            videoNotifierSupport.fireVideoEvent(
+                    type, visualComponent, origin,
+                    wait);
     }
 
     /**
@@ -509,10 +523,13 @@ protected boolean fireVideoEvent(
      *
      * @param event the <tt>VideoEvent</tt> to be fired to the
      * <tt>VideoListener</tt>s registered with this instance
+     * @param wait <tt>true</tt> if the call is to wait till the specified
+     * <tt>VideoEvent</tt> has been delivered to the <tt>VideoListener</tt>s;
+     * otherwise, <tt>false</tt>
      */
-    protected void fireVideoEvent(VideoEvent event)
+    protected void fireVideoEvent(VideoEvent event, boolean wait)
     {
-        videoNotifierSupport.fireVideoEvent(event);
+        videoNotifierSupport.fireVideoEvent(event, wait);
     }
 
     /**
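
The reason the stream-level notifier stays synchronous and relays events with wait == true is the consume handshake: fireVideoEvent reports whether a listener consumed the visual Component, which is only meaningful once delivery has completed. A hedged sketch of such a consuming listener, using the neomedia event types touched by this commit (the container field is hypothetical):

    import java.awt.Component;
    import java.awt.Container;

    import net.java.sip.communicator.service.neomedia.event.VideoEvent;
    import net.java.sip.communicator.service.neomedia.event.VideoListener;

    class ConsumingVideoListenerSketch
        implements VideoListener
    {
        private final Container videoContainer;

        ConsumingVideoListenerSketch(Container videoContainer)
        {
            this.videoContainer = videoContainer;
        }

        public void videoAdded(VideoEvent event)
        {
            Component video = event.getVisualComponent();

            if (video != null)
            {
                videoContainer.add(video);
                // Tell the firer that we keep a reference to the Component.
                // The firer only sees this if it waited for the delivery.
                event.consume();
            }
        }

        public void videoRemoved(VideoEvent event)
        {
            Component video = event.getVisualComponent();

            if (video != null)
                videoContainer.remove(video);
        }

        public void videoUpdate(VideoEvent event)
        {
            // Nothing to keep track of for size changes in this sketch.
        }
    }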

@@ -76,37 +76,33 @@ public ImageStreamingAuto() throws Exception
         for(ScreenDevice screen : screens)
         {
             Dimension size = screenSize != null ? screenSize : screen.getSize();
-            Format formats[]= new Format[]
+            Format formats[]
+                = new Format[]
                 {
                     new AVFrameFormat(
                             size,
                             Format.NOT_SPECIFIED,
                             FFmpeg.PIX_FMT_ARGB,
                             Format.NOT_SPECIFIED),
                     new RGBFormat(
                             size, // size
                             Format.NOT_SPECIFIED, // maxDataLength
                             Format.byteArray, // dataType
                             Format.NOT_SPECIFIED, // frameRate
                             32, // bitsPerPixel
                             2 /* red */, 3 /* green */, 4 /* blue */)
                 };
             CaptureDeviceInfo devInfo
                 = new CaptureDeviceInfo(
                         name + " " + i,
                         new MediaLocator(LOCATOR_PROTOCOL + ":" + i),
                         formats);
 
-            /* add to JMF device manager */
             CaptureDeviceManager.addDevice(devInfo);
             i++;
 
             if(multipleMonitorOneScreen)
-            {
                 break;
-            }
         }
 
         CaptureDeviceManager.commit();
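
The loop above registers one CaptureDeviceInfo per screen with JMF and then commits the registry. A minimal standalone sketch of those JMF calls (the device name, locator string and format below are placeholders, not the real desktop-streaming values):

    import java.io.IOException;

    import javax.media.CaptureDeviceInfo;
    import javax.media.CaptureDeviceManager;
    import javax.media.Format;
    import javax.media.MediaLocator;
    import javax.media.format.RGBFormat;

    class ScreenDeviceRegistrationSketch
    {
        static void register(int screenIndex)
            throws IOException
        {
            CaptureDeviceInfo devInfo
                = new CaptureDeviceInfo(
                        "Desktop Streaming " + screenIndex,
                        new MediaLocator("imgstreaming:" + screenIndex),
                        new Format[] { new RGBFormat() });

            // Make the device known to JMF and persist the registry.
            CaptureDeviceManager.addDevice(devInfo);
            CaptureDeviceManager.commit();
        }
    }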

@@ -174,9 +174,9 @@ public class MediaDeviceSession
     private MediaDirection startedDirection = MediaDirection.INACTIVE;
 
     /**
-     * If the player have to be disposed when we #close() this instance.
+     * If the player have to be disposed when we {@link #close()} this instance.
      */
-    private boolean disposePlayerWhenClose = true;
+    private boolean disposePlayerOnClose = true;
 
     /**
      * Whether output size has changed after latest processor config.
@@ -204,9 +204,9 @@ protected MediaDeviceSession(AbstractMediaDevice device)
      *
      * @param dispose value to set
      */
-    public void setDisposePlayerWhenClose(boolean dispose)
+    public void setDisposePlayerOnClose(boolean dispose)
     {
-        disposePlayerWhenClose = dispose;
+        disposePlayerOnClose = dispose;
     }
 
     /**
@@ -337,11 +337,9 @@ public void close()
         disconnectCaptureDevice();
         closeProcessor();
 
-        if(disposePlayerWhenClose)
-        {
-            // playback
+        // playback
+        if (disposePlayerOnClose)
             disposePlayer();
-        }
     }
 
     /**
@@ -363,8 +361,16 @@ private void closeProcessor()
             if (processor.getState() == Processor.Realized)
             {
-                DataSource dataOutput = processor.getDataOutput();
+                DataSource dataOutput;
+
+                try
+                {
+                    dataOutput = processor.getDataOutput();
+                }
+                catch (NotRealizedError nre)
+                {
+                    dataOutput = null;
+                }
 
                 if (dataOutput != null)
                     dataOutput.disconnect();
             }
@@ -590,11 +596,23 @@ private void disconnectCaptureDevice()
      */
     private void disposePlayer()
     {
+        Player player;
+
         synchronized (playbackSyncRoot)
        {
-            if (player != null)
-                disposePlayer(player);
+            /*
+             * If #disposePlayer(Player) is just executed inside the
+             * synchronized block protected by #playbackSyncRoot, it practically
+             * locks the rest of the state protected by the same synchronization
+             * root. But that is not necessary because #disposePlayer(Player)
+             * will protect #player when necessary. Anyway, the change from the
+             * described behavior to the current one has been made while solving
+             * a deadlock.
+             */
+            player = this.player;
         }
+
+        if (player != null)
+            disposePlayer(player);
     }
 
     /**
@@ -1815,7 +1833,7 @@ private static boolean waitForState(Processor processor, int state)
      */
     protected void transferRenderingSession(MediaDeviceSession session)
     {
-        if(session.disposePlayerWhenClose)
+        if (session.disposePlayerOnClose)
         {
             logger.error("Cannot tranfer rendering session if " +
                 "MediaDeviceSession has closed it");

@@ -115,7 +115,7 @@ public class VideoMediaDeviceSession
      * <tt>VideoListener</tt>s and firing <tt>VideoEvent</tt>s to them.
      */
     private final VideoNotifierSupport videoNotifierSupport
-        = new VideoNotifierSupport(this);
+        = new VideoNotifierSupport(this, false);
 
     /**
      * Initializes a new <tt>VideoMediaDeviceSession</tt> instance which is to
@@ -291,9 +291,8 @@ protected void disposePlayer(Player player)
         if (visualComponent != null)
         {
             fireVideoEvent(
-                VideoEvent.VIDEO_REMOVED,
-                visualComponent,
-                VideoEvent.REMOTE);
+                VideoEvent.VIDEO_REMOVED, visualComponent, VideoEvent.REMOTE,
+                false);
         }
     }
@@ -309,6 +308,9 @@ protected void disposePlayer(Player player)
      * @param origin {@link VideoEvent#LOCAL} if the origin of the video is
      * local (e.g. it is being locally captured); {@link VideoEvent#REMOTE} if
      * the origin of the video is remote (e.g. a remote peer is streaming it)
+     * @param wait <tt>true</tt> if the call is to wait till the specified
+     * <tt>VideoEvent</tt> has been delivered to the <tt>VideoListener</tt>s;
+     * otherwise, <tt>false</tt>
      * @return <tt>true</tt> if this event and, more specifically, the visual
      * <tt>Component</tt> it describes have been consumed and should be
      * considered owned, referenced (which is important because
@@ -316,9 +318,8 @@ protected void disposePlayer(Player player)
      * otherwise, <tt>false</tt>
      */
     protected boolean fireVideoEvent(
-        int type,
-        Component visualComponent,
-        int origin)
+        int type, Component visualComponent, int origin,
+        boolean wait)
     {
         if (logger.isTraceEnabled())
         {
@@ -330,7 +331,9 @@ protected boolean fireVideoEvent(
         }
 
         return
-            videoNotifierSupport.fireVideoEvent(type, visualComponent, origin);
+            videoNotifierSupport.fireVideoEvent(
+                    type, visualComponent, origin,
+                    wait);
     }
 
     /**
@@ -339,10 +342,13 @@ protected boolean fireVideoEvent(
      *
      * @param videoEvent the <tt>VideoEvent</tt> to be fired to the
      * <tt>VideoListener</tt>s registered with this instance
+     * @param wait <tt>true</tt> if the call is to wait till the specified
+     * <tt>VideoEvent</tt> has been delivered to the <tt>VideoListener</tt>s;
+     * otherwise, <tt>false</tt>
      */
-    protected void fireVideoEvent(VideoEvent videoEvent)
+    protected void fireVideoEvent(VideoEvent videoEvent, boolean wait)
     {
-        videoNotifierSupport.fireVideoEvent(videoEvent);
+        videoNotifierSupport.fireVideoEvent(videoEvent, wait);
     }
 
     /**
@@ -498,7 +504,8 @@ else if (controllerEvent instanceof RealizeCompleteEvent)
                 if (fireVideoEvent(
                         VideoEvent.VIDEO_ADDED,
                         visualComponent,
-                        VideoEvent.LOCAL))
+                        VideoEvent.LOCAL,
+                        true))
                 {
                     localVisualComponentConsumed(visualComponent, player);
                 }
@@ -648,7 +655,9 @@ public void paint(Graphics g)
              */
            canvas.setName(DESKTOP_STREAMING_ICON);
 
-            fireVideoEvent(VideoEvent.VIDEO_ADDED, canvas, VideoEvent.LOCAL);
+            fireVideoEvent(
+                    VideoEvent.VIDEO_ADDED, canvas, VideoEvent.LOCAL,
+                    false);
         }
         return canvas;
     }
@@ -669,9 +678,8 @@ public void disposeLocalVisualComponent(Component component)
                 && DESKTOP_STREAMING_ICON.equals(component.getName()))
         {
             fireVideoEvent(
-                VideoEvent.VIDEO_REMOVED,
-                component,
-                VideoEvent.LOCAL);
+                VideoEvent.VIDEO_REMOVED, component, VideoEvent.LOCAL,
+                false);
             return;
         }
@@ -709,9 +717,8 @@ protected void disposeLocalPlayer(Player player)
         if (visualComponent != null)
             fireVideoEvent(
-                VideoEvent.VIDEO_REMOVED,
-                visualComponent,
-                VideoEvent.LOCAL);
+                VideoEvent.VIDEO_REMOVED, visualComponent, VideoEvent.LOCAL,
+                false);
     }
 
     /**
@@ -942,9 +949,8 @@ public void componentResized(ComponentEvent e)
                 });
 
             fireVideoEvent(
-                VideoEvent.VIDEO_ADDED,
-                visualComponent,
-                VideoEvent.REMOTE);
+                VideoEvent.VIDEO_ADDED, visualComponent, VideoEvent.REMOTE,
+                false);
         }
     }
@@ -991,7 +997,8 @@ public void run()
                             visualComponent,
                             SizeChangeVideoEvent.REMOTE,
                             width,
-                            height));
+                            height),
+                    false);
             }
         }
@@ -1516,7 +1523,8 @@ protected void startedDirectionChanged(
                     fireVideoEvent(
                         VideoEvent.VIDEO_ADDED,
                         visualComponent,
-                        VideoEvent.REMOTE);
+                        VideoEvent.REMOTE,
+                        false);
                 }
             }
         }
@@ -1531,7 +1539,8 @@ else if (state > Processor.Configured)
                 fireVideoEvent(
                     VideoEvent.VIDEO_REMOVED,
                     visualComponent,
-                    VideoEvent.REMOTE);
+                    VideoEvent.REMOTE,
+                    false);
             }
         }
     }
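
The session-level call sites above pick the wait flag individually: the notifier is created asynchronous, events fired from inside JMF callbacks pass wait == false, and only the local VIDEO_ADDED waits because its return value reports whether the Component was consumed. A hedged usage sketch built on the API shown in this commit (the method names here are illustrative):

    import java.awt.Component;

    import net.java.sip.communicator.service.neomedia.event.VideoEvent;
    import net.java.sip.communicator.service.neomedia.event.VideoNotifierSupport;

    class DeviceSessionFiringSketch
    {
        // false = VideoEvents are delivered from a background thread.
        private final VideoNotifierSupport videoNotifierSupport
            = new VideoNotifierSupport(this, false);

        boolean announceLocalVideo(Component visualComponent)
        {
            // wait == true: the caller must learn whether a listener consumed
            // (took ownership of) the Component, so delivery has to complete.
            return
                videoNotifierSupport.fireVideoEvent(
                        VideoEvent.VIDEO_ADDED, visualComponent,
                        VideoEvent.LOCAL,
                        true);
        }

        void announceRemoteVideoRemoved(Component visualComponent)
        {
            // wait == false: fire-and-forget, safe to call while internal
            // locks are held or from within JMF callbacks.
            videoNotifierSupport.fireVideoEvent(
                    VideoEvent.VIDEO_REMOVED, visualComponent,
                    VideoEvent.REMOTE,
                    false);
        }
    }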

@@ -10,10 +10,11 @@ Import-Package: org.bouncycastle.crypto,
  org.bouncycastle.crypto.macs,
  org.bouncycastle.crypto.params,
  org.bouncycastle.crypto.prng,
+ org.ice4j.socket,
  org.json,
  org.osgi.framework,
+ org.w3c.dom,
  org.xml.sax,
- org.ice4j.socket,
  javax.imageio,
  javax.sound.sampled,
  javax.swing,
@@ -21,6 +22,7 @@ Import-Package: org.bouncycastle.crypto,
  javax.swing.event,
  javax.swing.table,
  javax.swing.text,
+ javax.xml.parsers,
  net.java.sip.communicator.service.configuration,
  net.java.sip.communicator.service.fileaccess,
  net.java.sip.communicator.service.gui,

@@ -474,9 +474,7 @@ protected void processActiveRequest(RequestEvent requestEvent,
         byte[] rawContent)
     {
         if(requestEvent.getDialog() != callPeer.getDialog())
-        {
             return;
-        }
 
         if (rawContent != null)
         {
@@ -514,19 +512,14 @@ protected void processActiveRequest(RequestEvent requestEvent,
                 List<ComponentEvent> events = null;
                 Point p = getOrigin();
 
-                events = DesktopSharingProtocolSipImpl.parse(root, size,
-                        p);
+                events = DesktopSharingProtocolSipImpl.parse(root, size, p);
 
                 for(ComponentEvent evt : events)
                 {
                     if(evt instanceof MouseEvent)
-                    {
                         processMouseEvent((MouseEvent)evt);
-                    }
                     else if(evt instanceof KeyEvent)
-                    {
                         processKeyboardEvent((KeyEvent)evt);
-                    }
                 }
             }
         }

@@ -83,10 +83,12 @@ public Call createVideoCall(String uri, MediaDevice mediaDevice)
         Address toAddress = parentProvider.parseAddressString(uri);
         CallSipImpl call = basicTelephony.createOutgoingCall();
 
         call.setVideoDevice(mediaDevice);
         call.setLocalVideoAllowed(true, getMediaUseCase());
         call.invite(toAddress, null);
 
         origin = getOriginForMediaDevice(mediaDevice);
         return call;
     }
@@ -119,6 +121,7 @@ public Call createVideoCall(Contact callee, MediaDevice mediaDevice)
         }
 
         CallSipImpl call = basicTelephony.createOutgoingCall();
         call.setLocalVideoAllowed(true, getMediaUseCase());
+        call.setVideoDevice(mediaDevice);
         call.invite(toAddress, null);
@@ -146,9 +149,9 @@ public Call createVideoCall(Contact callee, MediaDevice mediaDevice)
     public Call createVideoCall(String uri)
         throws OperationFailedException, ParseException
     {
-        Call call = super.createVideoCall(uri);
-        MediaDevice device = ((CallSipImpl)call).getDefaultDevice(
-            MediaType.VIDEO);
+        CallSipImpl call = (CallSipImpl) super.createVideoCall(uri);
+        MediaDevice device = call.getDefaultDevice(MediaType.VIDEO);
+
         size = (((VideoMediaFormat)device.getFormat()).getSize());
         origin = getOriginForMediaDevice(device);
         return call;
@@ -170,9 +173,9 @@ public Call createVideoCall(String uri)
     @Override
     public Call createVideoCall(Contact callee) throws OperationFailedException
     {
-        Call call = super.createVideoCall(callee);
-        MediaDevice device = ((CallSipImpl)call).getDefaultDevice(
-            MediaType.VIDEO);
+        CallSipImpl call = (CallSipImpl) super.createVideoCall(callee);
+        MediaDevice device = call.getDefaultDevice(MediaType.VIDEO);
+
         size = (((VideoMediaFormat)device.getFormat()).getSize());
         origin = getOriginForMediaDevice(device);
         return call;
@@ -188,21 +191,24 @@ public Call createVideoCall(Contact callee) throws OperationFailedException
      * @param allowed <tt>true</tt> if local video transmission is allowed and
      * <tt>false</tt> otherwise.
      *
      * @throws OperationFailedException if video initialization fails.
      */
     @Override
     public void setLocalVideoAllowed(Call call, boolean allowed)
         throws OperationFailedException
     {
-        ((CallSipImpl)call).setLocalVideoAllowed(allowed, MediaUseCase.DESKTOP);
-        ((CallSipImpl)call).setVideoDevice(null);
-        MediaDevice device = ((CallSipImpl)call).getDefaultDevice(
-            MediaType.VIDEO);
-        size = (((VideoMediaFormat)device.getFormat()).getSize());
+        CallSipImpl callImpl = (CallSipImpl) call;
+
+        callImpl.setLocalVideoAllowed(allowed, MediaUseCase.DESKTOP);
+        callImpl.setVideoDevice(null);
+
+        MediaDevice device = callImpl.getDefaultDevice(MediaType.VIDEO);
+        size = ((VideoMediaFormat)device.getFormat()).getSize();
         origin = getOriginForMediaDevice(device);
 
         /* reinvite all peers */
-        ((CallSipImpl)call).reInvite();
+        callImpl.reInvite();
     }
@@ -267,13 +273,10 @@ public boolean isPartialStreaming(Call call)
         CallSipImpl callImpl = (CallSipImpl)call;
         MediaDevice device = callImpl.getDefaultDevice(MediaType.VIDEO);
 
-        if(device != null)
-        {
-            MediaService mediaService = SipActivator.getMediaService();
-
-            return mediaService.isPartialStreaming(device);
-        }
-        return false;
+        return
+            (device == null)
+                ? false
+                : SipActivator.getMediaService().isPartialStreaming(device);
     }
@@ -325,8 +328,8 @@ public Point getOrigin()
      */
     protected static Point getOriginForMediaDevice(MediaDevice device)
     {
-        MediaService mediaService = SipActivator.getMediaService();
-
-        return mediaService.getOriginForDesktopStreamingDevice(device);
+        return
+            SipActivator.getMediaService().getOriginForDesktopStreamingDevice(
+                    device);
     }
 }

@@ -6,18 +6,27 @@
  */
 package net.java.sip.communicator.service.neomedia.event;
 
-import java.awt.Component;
+import java.awt.*;
 import java.util.*;
+import java.util.List; // disambiguation
 
 /**
  * Represents a mechanism to easily add to a specific <tt>Object</tt> by means
  * of composition support for firing <tt>VideoEvent</tt>s to
  * <tt>VideoListener</tt>s.
  *
- * @author Lubomir Marinov
+ * @author Lyubomir Marinov
  */
 public class VideoNotifierSupport
 {
+    private static final long THREAD_TIMEOUT = 5000;
+
+    /**
+     * The list of <tt>VideoEvent</tt>s which are to be delivered to the
+     * {@link #listeners} registered with this instance when
+     * {@link #synchronous} is equal to <tt>false</tt>.
+     */
+    private final List<VideoEvent> events;
+
     /**
      * The list of <tt>VideoListener</tt>s interested in changes in the
@@ -32,6 +41,18 @@ public class VideoNotifierSupport
      */
     private final Object source;
 
+    /**
+     * The indicator which determines whether this instance delivers the
+     * <tt>VideoEvent</tt>s to the {@link #listeners} synchronously.
+     */
+    private final boolean synchronous;
+
+    /**
+     * The <tt>Thread</tt> in which {@link #events} are delivered to the
+     * {@link #listeners} when {@link #synchronous} is equal to <tt>false</tt>.
+     */
+    private Thread thread;
+
     /**
      * Initializes a new <tt>VideoNotifierSupport</tt> instance which is to
      * facilitate the management of <tt>VideoListener</tt>s and firing
@@ -41,8 +62,26 @@ public class VideoNotifierSupport
      * of the <tt>VideoEvent</tt>s fired by the new instance
      */
     public VideoNotifierSupport(Object source)
+    {
+        this(source, true);
+    }
+
+    /**
+     * Initializes a new <tt>VideoNotifierSupport</tt> instance which is to
+     * facilitate the management of <tt>VideoListener</tt>s and firing
+     * <tt>VideoEvent</tt>s to them for a specific <tt>Object</tt>.
+     *
+     * @param source the <tt>Object</tt> which is to be reported as the source
+     * of the <tt>VideoEvent</tt>s fired by the new instance
+     * @param synchronous <tt>true</tt> if the new instance is to deliver the
+     * <tt>VideoEvent</tt>s synchronously; otherwise, <tt>false</tt>
+     */
+    public VideoNotifierSupport(Object source, boolean synchronous)
     {
         this.source = source;
+        this.synchronous = synchronous;
+
+        events = this.synchronous ? null : new LinkedList<VideoEvent>();
     }
 
     /**
@@ -71,6 +110,32 @@ public void addVideoListener(VideoListener listener)
         }
     }
 
+    protected void doFireVideoEvent(VideoEvent event)
+    {
+        VideoListener[] listeners;
+
+        synchronized (this.listeners)
+        {
+            listeners
+                = this.listeners.toArray(
+                        new VideoListener[this.listeners.size()]);
+        }
+
+        for (VideoListener listener : listeners)
+            switch (event.getType())
+            {
+            case VideoEvent.VIDEO_ADDED:
+                listener.videoAdded(event);
+                break;
+            case VideoEvent.VIDEO_REMOVED:
+                listener.videoRemoved(event);
+                break;
+            default:
+                listener.videoUpdate(event);
+                break;
+            }
+    }
+
     /**
      * Notifies the <tt>VideoListener</tt>s registered with this
      * <tt>VideoMediaStream</tt> about a specific type of change in the
@@ -83,6 +148,9 @@ public void addVideoListener(VideoListener listener)
      * @param origin {@link VideoEvent#LOCAL} if the origin of the video is
      * local (e.g. it is being locally captured); {@link VideoEvent#REMOTE} if
      * the origin of the video is remote (e.g. a remote peer is streaming it)
+     * @param wait <tt>true</tt> if the call is to wait till the specified
+     * <tt>VideoEvent</tt> has been delivered to the <tt>VideoListener</tt>s;
+     * otherwise, <tt>false</tt>
      * @return <tt>true</tt> if this event and, more specifically, the visual
      * <tt>Component</tt> it describes have been consumed and should be
      * considered owned, referenced (which is important because
@@ -90,44 +158,14 @@ public void addVideoListener(VideoListener listener)
      * otherwise, <tt>false</tt>
      */
     public boolean fireVideoEvent(
-        int type,
-        Component visualComponent,
-        int origin)
+        int type, Component visualComponent, int origin,
+        boolean wait)
     {
-        VideoListener[] listeners;
-
-        synchronized (this.listeners)
-        {
-            listeners
-                = this.listeners
-                    .toArray(new VideoListener[this.listeners.size()]);
-        }
-
-        boolean consumed;
-
-        if (listeners.length > 0)
-        {
-            VideoEvent event
-                = new VideoEvent(source, type, visualComponent, origin);
-
-            for (VideoListener listener : listeners)
-                switch (type)
-                {
-                case VideoEvent.VIDEO_ADDED:
-                    listener.videoAdded(event);
-                    break;
-                case VideoEvent.VIDEO_REMOVED:
-                    listener.videoRemoved(event);
-                    break;
-                default:
-                    throw new IllegalArgumentException("type");
-                }
-
-            consumed = event.isConsumed();
-        }
-        else
-            consumed = false;
-        return consumed;
+        VideoEvent event
+            = new VideoEvent(source, type, visualComponent, origin);
+
+        fireVideoEvent(event, wait);
+        return event.isConsumed();
     }
 
     /**
@@ -136,31 +174,45 @@ public boolean fireVideoEvent(
      *
      * @param event the <tt>VideoEvent</tt> to be fired to the
      * <tt>VideoListener</tt>s registered with this instance
+     * @param wait <tt>true</tt> if the call is to wait till the specified
+     * <tt>VideoEvent</tt> has been delivered to the <tt>VideoListener</tt>s;
+     * otherwise, <tt>false</tt>
      */
-    public void fireVideoEvent(VideoEvent event)
+    public void fireVideoEvent(VideoEvent event, boolean wait)
     {
-        VideoListener[] listeners;
-
-        synchronized (this.listeners)
-        {
-            listeners
-                = this.listeners
-                    .toArray(new VideoListener[this.listeners.size()]);
-        }
-
-        for (VideoListener listener : listeners)
-            switch (event.getType())
-            {
-            case VideoEvent.VIDEO_ADDED:
-                listener.videoAdded(event);
-                break;
-            case VideoEvent.VIDEO_REMOVED:
-                listener.videoRemoved(event);
-                break;
-            default:
-                listener.videoUpdate(event);
-                break;
-            }
+        if (synchronous)
+            doFireVideoEvent(event);
+        else
+        {
+            synchronized (events)
+            {
+                events.add(event);
+
+                if (thread == null)
+                    startThread();
+                else
+                    events.notify();
+
+                if (wait)
+                {
+                    boolean interrupted = false;
+
+                    while (events.contains(event) && (thread != null))
+                    {
+                        try
+                        {
+                            events.wait();
+                        }
+                        catch (InterruptedException ie)
+                        {
+                            interrupted = true;
+                        }
+                    }
+                    if (interrupted)
+                        Thread.currentThread().interrupt();
+                }
+            }
+        }
     }
 
     /**
@@ -179,4 +231,97 @@ public void removeVideoListener(VideoListener listener)
             listeners.remove(listener);
         }
     }
+
+    private void runInThread()
+    {
+        while (true)
+        {
+            VideoEvent event = null;
+
+            synchronized (events)
+            {
+                long emptyTime = -1;
+                boolean interrupted = false;
+
+                while (events.isEmpty())
+                {
+                    if (emptyTime == -1)
+                        emptyTime = System.currentTimeMillis();
+                    else
+                    {
+                        long newEmptyTime = System.currentTimeMillis();
+
+                        if ((newEmptyTime - emptyTime) >= THREAD_TIMEOUT)
+                        {
+                            events.notify();
+                            return;
+                        }
+                    }
+
+                    try
+                    {
+                        events.wait(THREAD_TIMEOUT);
+                    }
+                    catch (InterruptedException ie)
+                    {
+                        interrupted = true;
+                    }
+                }
+                if (interrupted)
+                    Thread.currentThread().interrupt();
+
+                event = events.remove(0);
+            }
+
+            if (event != null)
+            {
+                try
+                {
+                    doFireVideoEvent(event);
+                }
+                catch (Throwable t)
+                {
+                    if (t instanceof ThreadDeath)
+                        throw (ThreadDeath) t;
+                }
+
+                synchronized (events)
+                {
+                    events.notify();
+                }
+            }
+        }
+    }
+
+    private void startThread()
+    {
+        thread
+            = new Thread("VideoNotifierSupportThread")
+            {
+                @Override
+                public void run()
+                {
+                    try
+                    {
+                        runInThread();
+                    }
+                    finally
+                    {
+                        synchronized (events)
+                        {
+                            if (Thread.currentThread().equals(thread))
+                            {
+                                thread = null;
+                                if (events.isEmpty())
+                                    events.notify();
+                                else
+                                    startThread();
+                            }
+                        }
+                    }
+                }
+            };
+        thread.setDaemon(true);
+        thread.start();
+    }
 }
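
A small, hypothetical demo of the reworked class: with synchronous == false the events are queued and delivered on a worker thread, and the wait argument decides whether fireVideoEvent blocks until that delivery has happened. It assumes only the API shown above:

    import java.awt.Canvas;

    import net.java.sip.communicator.service.neomedia.event.VideoEvent;
    import net.java.sip.communicator.service.neomedia.event.VideoListener;
    import net.java.sip.communicator.service.neomedia.event.VideoNotifierSupport;

    class VideoNotifierSupportDemo
    {
        public static void main(String[] args)
        {
            VideoNotifierSupport notifier
                = new VideoNotifierSupport(new Object(), false);

            notifier.addVideoListener(
                    new VideoListener()
                    {
                        public void videoAdded(VideoEvent event)
                        {
                            System.out.println(
                                    "delivered on "
                                        + Thread.currentThread().getName());
                        }

                        public void videoRemoved(VideoEvent event) {}

                        public void videoUpdate(VideoEvent event) {}
                    });

            // Queued for the worker thread; this call returns immediately.
            notifier.fireVideoEvent(
                    VideoEvent.VIDEO_ADDED, new Canvas(), VideoEvent.LOCAL,
                    false);

            // Does not return before the worker thread has delivered the event.
            notifier.fireVideoEvent(
                    VideoEvent.VIDEO_ADDED, new Canvas(), VideoEvent.LOCAL,
                    true);
        }
    }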

@@ -375,12 +375,14 @@ public void logAndFail(String message, Throwable throwable)
     public void setLocalVideoAllowed(boolean allowed)
         throws OperationFailedException
     {
-        if(getMediaHandler().isLocalVideoTransmissionEnabled() == allowed)
-            return;
-
-        // Modify the local media setup to reflect the requested setting for
-        // the streaming of the local video.
-        getMediaHandler().setLocalVideoTransmissionEnabled(allowed);
+        CallPeerMediaHandler<?> mediaHandler = getMediaHandler();
+
+        if(mediaHandler.isLocalVideoTransmissionEnabled() != allowed)
+        {
+            // Modify the local media setup to reflect the requested setting for
+            // the streaming of the local video.
+            mediaHandler.setLocalVideoTransmissionEnabled(allowed);
+        }
     }
 
     /**
@@ -509,14 +511,16 @@ public void setState(CallPeerState newState, String reason, int reasonCode)
         // of CallPeerMediaHandler) we won't set and fire the current state
         // to Disconnected. Before closing the mediaHandler is setting the state
         // in order to deliver states as quick as possible.
-        synchronized(getMediaHandler())
+        CallPeerMediaHandler<?> mediaHandler = getMediaHandler();
+
+        synchronized(mediaHandler)
         {
             super.setState(newState, reason, reasonCode);
 
             if (CallPeerState.DISCONNECTED.equals(newState)
                 || CallPeerState.FAILED.equals(newState))
             {
-                getMediaHandler().close();
+                mediaHandler.close();
             }
         }
     }
@@ -788,8 +792,10 @@ public void conferenceMemberAdded(CallPeerConferenceEvent conferenceEvent)
             // us audio for at least two separate participants. We therefore
             // need to remove the stream level listeners and switch to CSRC
            // level listening
-            getMediaHandler().setStreamAudioLevelListener(null);
-            getMediaHandler().setCsrcAudioLevelListener(this);
+            CallPeerMediaHandler<?> mediaHandler = getMediaHandler();
+
+            mediaHandler.setStreamAudioLevelListener(null);
+            mediaHandler.setCsrcAudioLevelListener(this);
         }
     }
@@ -811,8 +817,10 @@ public void conferenceMemberRemoved(CallPeerConferenceEvent conferenceEvent)
             // since there's only us and her in the call. Lets stop being a CSRC
            // listener and move back to listening the audio level of the
            // stream itself.
-            getMediaHandler().setStreamAudioLevelListener(this);
-            getMediaHandler().setCsrcAudioLevelListener(null);
+            CallPeerMediaHandler<?> mediaHandler = getMediaHandler();
+
+            mediaHandler.setStreamAudioLevelListener(this);
+            mediaHandler.setCsrcAudioLevelListener(null);
         }
     }
