Fixes video conferencing issues such as the local video failing to be displayed and the conference focus being required to capture video even when it only translates RTP between the remote peers.

cusax-fix
Lyubomir Marinov 14 years ago
parent d100493139
commit a2bf866ce0
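
At the centre of the local-video fix is a reworked OperationSetVideoTelephony API. The condensed before/after below is a sketch of the interface hunk further down, not code carried by the commit; CallPeer and OperationFailedException are assumed to come from net.java.sip.communicator.service.protocol, as in the imports shown in this diff.

import java.awt.Component;

import net.java.sip.communicator.service.protocol.CallPeer;
import net.java.sip.communicator.service.protocol.OperationFailedException;

interface LocalVideoTelephonySketch
{
    /*
     * Removed by this commit (the asynchronous creation/disposal pair):
     *
     *   Component createLocalVisualComponent(CallPeer peer, VideoListener listener)
     *       throws OperationFailedException;
     *
     *   void disposeLocalVisualComponent(CallPeer peer, Component component);
     */

    // Added by this commit: the depiction of the local video is simply queried
    // and is null unless the local video is actually being streamed.
    Component getLocalVisualComponent(CallPeer peer)
        throws OperationFailedException;
}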

@ -249,10 +249,9 @@ private void addVideoListener(CallPeer callPeer)
if (!isLocalVideoListenerAdded)
{
isLocalVideoListenerAdded = true;
telephony.addPropertyChangeListener(
call, videoTelephonyListener);
isLocalVideoListenerAdded = true;
}
synchronized (videoContainers)
@ -381,10 +380,7 @@ public void removeVideoListener(
isLocalVideoListenerAdded = false;
if (localVideo != null)
{
telephony.disposeLocalVisualComponent(callPeer, localVideo);
localVideo = null;
}
}
synchronized (videoContainers)
@ -976,8 +972,7 @@ private void handleLocalVideoStreamingChange(
{
try
{
videoTelephony.createLocalVisualComponent(
callPeer, listener);
videoTelephony.getLocalVisualComponent(callPeer);
}
catch (OperationFailedException ex)
{
@ -989,8 +984,6 @@ private void handleLocalVideoStreamingChange(
}
else if (localVideo != null)
{
videoTelephony.disposeLocalVisualComponent(
callPeer, localVideo);
handleVideoEvent(
callPeer.getCall(),
new VideoEvent(
@ -1452,19 +1445,19 @@ public void run()
return;
}
if(videoType == VideoEvent.LOCAL && localVideo != null)
{
localVideo.addMouseMotionListener(
localVideoListener);
localVideo.addMouseListener(
localVideoListener);
}
else if(videoType == VideoEvent.REMOTE)
switch (videoType)
{
if(allowRemoteControl)
case VideoEvent.LOCAL:
if (localVideo != null)
{
addMouseAndKeyListeners();
localVideo.addMouseMotionListener(localVideoListener);
localVideo.addMouseListener(localVideoListener);
}
break;
case VideoEvent.REMOTE:
if (allowRemoteControl)
addMouseAndKeyListeners();
break;
}
}
@ -1480,13 +1473,10 @@ public void setLocalVideoVisible(boolean isVisible)
{
this.localVideoVisible = isVisible;
if (isVisible
!= callRenderer.getCallContainer()
.isShowHideVideoButtonSelected())
{
callRenderer.getCallContainer()
.setShowHideVideoButtonSelected(isVisible);
}
CallPanel callContainer = callRenderer.getCallContainer();
if (isVisible != callContainer.isShowHideVideoButtonSelected())
callContainer.setShowHideVideoButtonSelected(isVisible);
int videoContainerCount;
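
The hunks above replace the createLocalVisualComponent()/disposeLocalVisualComponent() calls with the new getter. A minimal caller-side sketch, assuming the protocol-service imports used elsewhere in this diff; the Container handling and the class/method names are illustrative only:

import java.awt.Component;
import java.awt.Container;

import net.java.sip.communicator.service.protocol.CallPeer;
import net.java.sip.communicator.service.protocol.OperationFailedException;
import net.java.sip.communicator.service.protocol.OperationSetVideoTelephony;

final class LocalVideoUsageSketch
{
    /**
     * Adds the depiction of the local video streamed to callPeer, if any, to
     * videoContainer. With the reworked API there is no VideoListener to pass
     * and no component to dispose of afterwards.
     */
    static void showLocalVideo(
            OperationSetVideoTelephony telephony,
            CallPeer callPeer,
            Container videoContainer)
    {
        try
        {
            Component localVideo = telephony.getLocalVisualComponent(callPeer);

            if (localVideo != null)
                videoContainer.add(localVideo);
        }
        catch (OperationFailedException ofe)
        {
            // The local video is merely not depicted; the call goes on.
        }
    }
}

Because the component is obtained synchronously, the GUI no longer needs the VideoListener-based callback that the removed createLocalVisualComponent() required.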

@ -11,12 +11,10 @@
import java.util.*;
import net.java.sip.communicator.impl.protocol.jabber.extensions.jingle.*;
import net.java.sip.communicator.impl.protocol.jabber.jinglesdp.*;
import net.java.sip.communicator.service.protocol.*;
import net.java.sip.communicator.service.protocol.media.*;
import net.java.sip.communicator.util.*;
import org.jitsi.impl.neomedia.transform.sdes.*;
import org.jitsi.service.neomedia.*;
import org.jivesoftware.smack.packet.*;
@ -248,30 +246,21 @@ protected SrtpCryptoAttribute selectSdesCryptoSuite(
SDesControl sDesControl,
EncryptionPacketExtension encryptionPacketExtension)
{
List<CryptoPacketExtension> cryptoPacketExtensions =
encryptionPacketExtension.getCryptoList();
List<CryptoPacketExtension> cryptoPacketExtensions
= encryptionPacketExtension.getCryptoList();
Vector<SrtpCryptoAttribute> peerAttributes
= new Vector<SrtpCryptoAttribute>(cryptoPacketExtensions.size());
for(int i = 0; i < cryptoPacketExtensions.size(); ++i)
{
peerAttributes.add(
cryptoPacketExtensions.get(i).toSrtpCryptoAttribute());
}
for (CryptoPacketExtension cpe : cryptoPacketExtensions)
peerAttributes.add(cpe.toSrtpCryptoAttribute());
if(peerAttributes == null)
{
if (peerAttributes == null)
return null;
}
if(isInitiator)
{
if (isInitiator)
return sDesControl.initiatorSelectAttribute(peerAttributes);
}
else
{
return sDesControl.responderSelectAttribute(peerAttributes);
}
}
/**

@ -170,7 +170,7 @@ public CallPeerJabberImpl processSessionInitiate(JingleIQ jingleIQ)
CoinPacketExtension.ELEMENT_NAME,
CoinPacketExtension.NAMESPACE);
if(coin != null)
if (coin != null)
{
boolean b
= Boolean.parseBoolean(
@ -186,21 +186,23 @@ public CallPeerJabberImpl processSessionInitiate(JingleIQ jingleIQ)
// if paranoia is set, to accept the call we need to know that
// the other party has support for media encryption
if(getProtocolProvider().getAccountID().getAccountPropertyBoolean(
if (getProtocolProvider().getAccountID().getAccountPropertyBoolean(
ProtocolProviderFactory.MODE_PARANOIA, false)
&& callPeer.getMediaHandler().getAdvertisedEncryptionMethods().length
&& callPeer.getMediaHandler().getAdvertisedEncryptionMethods()
.length
== 0)
{
//send an error response;
String reasonText =
JabberActivator.getResources().getI18NString(
"service.gui.security.encryption.required");
JingleIQ errResp = JinglePacketFactory.createSessionTerminate(
jingleIQ.getTo(),
jingleIQ.getFrom(),
jingleIQ.getSID(),
Reason.SECURITY_ERROR,
reasonText);
String reasonText
= JabberActivator.getResources().getI18NString(
"service.gui.security.encryption.required");
JingleIQ errResp
= JinglePacketFactory.createSessionTerminate(
jingleIQ.getTo(),
jingleIQ.getFrom(),
jingleIQ.getSID(),
Reason.SECURITY_ERROR,
reasonText);
callPeer.setState(CallPeerState.FAILED, reasonText);
getProtocolProvider().getConnection().sendPacket(errResp);
@ -208,13 +210,13 @@ public CallPeerJabberImpl processSessionInitiate(JingleIQ jingleIQ)
return null;
}
if( callPeer.getState() == CallPeerState.FAILED)
if (callPeer.getState() == CallPeerState.FAILED)
return null;
callPeer.setState( CallPeerState.INCOMING_CALL );
// in case of attended transfer, auto answer the call
if(autoAnswer)
if (autoAnswer)
{
/* answer directly */
try
@ -256,35 +258,36 @@ public CallPeerJabberImpl processSessionInitiate(JingleIQ jingleIQ)
directions.put(MediaType.AUDIO, MediaDirection.INACTIVE);
directions.put(MediaType.VIDEO, MediaDirection.INACTIVE);
for(ContentPacketExtension c : offer)
for (ContentPacketExtension c : offer)
{
String contentName = c.getName();
MediaDirection remoteDirection
= JingleUtils.getDirection(c, callPeer.isInitiator());
if(MediaType.AUDIO.toString().equals(contentName))
if (MediaType.AUDIO.toString().equals(contentName))
directions.put(MediaType.AUDIO, remoteDirection);
else if(MediaType.VIDEO.toString().equals(contentName))
else if (MediaType.VIDEO.toString().equals(contentName))
directions.put(MediaType.VIDEO, remoteDirection);
}
// if this was the first peer we added in this call then the call is
// new and we also need to notify everyone of its creation.
if(this.getCallPeerCount() == 1 && getCallGroup() == null)
parentOpSet.fireCallEvent(CallEvent.CALL_RECEIVED, this,
directions);
if ((getCallPeerCount() == 1) && (getCallGroup() == null))
{
parentOpSet.fireCallEvent(
CallEvent.CALL_RECEIVED,
this,
directions);
}
// Manages auto answer with "audio only", or "audio / video" answer.
OperationSetAutoAnswerJabberImpl autoAnswerOpSet
= (OperationSetAutoAnswerJabberImpl)
this.getProtocolProvider()
.getOperationSet(OperationSetBasicAutoAnswer.class);
getProtocolProvider().getOperationSet(
OperationSetBasicAutoAnswer.class);
if(autoAnswerOpSet != null)
{
if (autoAnswerOpSet != null)
autoAnswerOpSet.autoAnswer(this, directions);
}
return callPeer;
}
@ -352,9 +355,11 @@ else if (callGroup != null)
/* enable remote-control if it is a desktop sharing session */
mediaHandler.setLocalInputEvtAware(getLocalInputEvtAware());
//set call state to connecting so that the user interface would start
//playing the tones. we do that here because we may be harvesting
//STUN/TURN addresses in initiateSession() which would take a while.
/*
* Set call state to connecting so that the user interface would start
* playing the tones. We do that here because we may be harvesting
* STUN/TURN addresses in initiateSession() which would take a while.
*/
callPeer.setState(CallPeerState.CONNECTING);
// if initializing session fails, set peer to failed
@ -375,21 +380,25 @@ else if (callGroup != null)
}
/**
* Send a <tt>content-modify</tt> message for all current <tt>CallPeer</tt>
* to reflect possible video change in media setup.
* Sends a <tt>content-modify</tt> message to each of the current
* <tt>CallPeer</tt>s to reflect a possible change in the media setup
* related to video.
*
* @param allowed if the local video is allowed or not
* @throws OperationFailedException if problem occurred during message
* generation or network problem
* @param allowed <tt>true</tt> if the streaming of the local video to the
* remote peer is allowed; otherwise, <tt>false</tt>
* @throws OperationFailedException if a problem occurred during message
* generation or there was a network problem
*/
public void modifyVideoContent(boolean allowed)
throws OperationFailedException
{
if(logger.isInfoEnabled())
logger.info(allowed ? "Start local video streaming" :
"Stop local video streaming");
if (logger.isInfoEnabled())
{
logger.info(
(allowed ? "Start" : "Stop") + " local video streaming");
}
for(CallPeerJabberImpl peer : getCallPeersVector())
for (CallPeerJabberImpl peer : getCallPeersVector())
peer.sendModifyVideoContent(allowed);
}

@ -13,7 +13,6 @@
import net.java.sip.communicator.impl.protocol.jabber.extensions.jingle.*;
import net.java.sip.communicator.service.protocol.*;
import net.java.sip.communicator.service.protocol.event.*;
import net.java.sip.communicator.service.protocol.media.*;
import net.java.sip.communicator.util.*;
import org.jivesoftware.smack.packet.*;

@ -963,7 +963,7 @@ else if(senders == SendersEnum.none)
{
logger.warn("Exception occurred during media reinitialization", e);
}
}
}
/**
* Processes the content-add {@link JingleIQ}.
@ -980,8 +980,15 @@ public void processContentAdd(final JingleIQ content)
boolean noCands = false;
logger.info("nocand " + noCands);
logger.info("run code");
/*
* If a remote peer turns her video on in a conference which is hosted
* by the local peer and the local peer is not streaming her local
* video, reinvite the other remote peers to enable RTP translation.
*/
MediaStream oldVideoStream = mediaHandler.getStream(MediaType.VIDEO);
try
{
if(!contentAddWithNoCands)
@ -1063,6 +1070,30 @@ public void run()
getProtocolProvider().getConnection().sendPacket(contentIQ);
mediaHandler.start();
/*
* If a remote peer turns her video on in a conference which is hosted
* by the local peer and the local peer is not streaming her local
* video, reinvite the other remote peers to enable RTP translation.
*/
if (oldVideoStream == null)
{
MediaStream newVideoStream
= mediaHandler.getStream(MediaType.VIDEO);
if ((newVideoStream != null)
&& mediaHandler.isRTPTranslationEnabled())
{
try
{
getCall().modifyVideoContent(true);
}
catch (OperationFailedException ofe)
{
logger.error("Failed to enable RTP translation", ofe);
}
}
}
}
/**
@ -1111,8 +1142,8 @@ public void processContentModify(JingleIQ content)
try
{
boolean modify = false;
if(ext.getFirstChildOfType(RtpDescriptionPacketExtension.class) !=
null)
if(ext.getFirstChildOfType(RtpDescriptionPacketExtension.class)
!= null)
{
modify = true;
}

@ -54,7 +54,7 @@ public class CallPeerMediaHandlerJabberImpl
* remote side. We use {@link LinkedHashMap}s to make sure that we preserve
* the order of the individual content extensions.
*/
private Map<String, ContentPacketExtension> localContentMap
private final Map<String, ContentPacketExtension> localContentMap
= new LinkedHashMap<String, ContentPacketExtension>();
/**
@ -62,7 +62,7 @@ public class CallPeerMediaHandlerJabberImpl
* We use {@link LinkedHashMap}s to make sure that we preserve
* the order of the individual content extensions.
*/
private Map<String, ContentPacketExtension> remoteContentMap
private final Map<String, ContentPacketExtension> remoteContentMap
= new LinkedHashMap<String, ContentPacketExtension>();
/**
@ -77,9 +77,9 @@ public class CallPeerMediaHandlerJabberImpl
private boolean supportQualityControls = false;
/**
* The current quality controls for this peer media handler if any.
* The <tt>QualityControl</tt> of this <tt>CallPeerMediaHandler</tt>.
*/
private QualityControlWrapper qualityControls = null;
private final QualityControlWrapper qualityControls;
/**
* Creates a new handler that will be managing media streams for
@ -91,6 +91,7 @@ public class CallPeerMediaHandlerJabberImpl
public CallPeerMediaHandlerJabberImpl(CallPeerJabberImpl peer)
{
super(peer);
qualityControls = new QualityControlWrapper(peer);
}
@ -196,7 +197,7 @@ protected MediaStream initStream(String streamName,
MediaStreamTarget target,
MediaDirection direction,
List<RTPExtension> rtpExtensions,
boolean masterStream)
boolean masterStream)
throws OperationFailedException
{
MediaStream stream
@ -209,7 +210,7 @@ protected MediaStream initStream(String streamName,
rtpExtensions,
masterStream);
if(stream != null)
if (stream != null)
stream.setName(streamName);
return stream;
@ -266,8 +267,8 @@ public void processOffer(List<ContentPacketExtension> offer)
// determine the direction that we need to announce.
MediaDirection remoteDirection = JingleUtils.getDirection(
content, getPeer().isInitiator());
MediaDirection direction = devDirection
.getDirectionForAnswer(remoteDirection);
MediaDirection direction
= devDirection.getDirectionForAnswer(remoteDirection);
// intersect the MediaFormats of our device with remote ones
List<MediaFormat> mutuallySupportedFormats
@ -315,14 +316,14 @@ public void processOffer(List<ContentPacketExtension> offer)
: (target != null) ? target.getDataAddress().getPort() : 0;
/*
* TODO If the offered transport is not supported, attempt to
* fall back to a supported one using transport-replace.
* TODO If the offered transport is not supported, attempt to fall
* back to a supported one using transport-replace.
*/
setTransportManager(transport.getNamespace());
if (mutuallySupportedFormats.isEmpty()
|| (devDirection == MediaDirection.INACTIVE)
|| (targetDataPort == 0))
|| (devDirection == MediaDirection.INACTIVE)
|| (targetDataPort == 0))
{
// skip stream and continue. contrary to sip we don't seem to
// need to send per-stream disabling answer and only one at the
@ -368,8 +369,8 @@ public void processOffer(List<ContentPacketExtension> offer)
description);
}
// got an content which have inputevt, it means that peer requests
// a desktop sharing session so tell it we support inputevt
// Got a content which has inputevt. It means that the peer requests
// a desktop sharing session so tell it we support inputevt.
if(content.getChildExtensionsOfType(InputEvtPacketExtension.class)
!= null)
{
@ -449,14 +450,17 @@ public Iterable<ContentPacketExtension> generateSessionAccept()
RtpDescriptionPacketExtension> contents
= new HashMap<ContentPacketExtension,
RtpDescriptionPacketExtension>();
for(ContentPacketExtension ourContent : sessAccept)
{
RtpDescriptionPacketExtension description
= JingleUtils.getRtpDescription(ourContent);
= JingleUtils.getRtpDescription(ourContent);
contents.put(ourContent, description);
}
boolean masterStreamSet = false;
for(Map.Entry<ContentPacketExtension, RtpDescriptionPacketExtension> en
: contents.entrySet())
{
@ -480,9 +484,11 @@ public Iterable<ContentPacketExtension> generateSessionAccept()
= JingleUtils.getDirection(ourContent, !peer.isInitiator());
// if we answer with video, tell remote peer that video direction is
// sendrecv, and whether video device can capture(send)
if(type == MediaType.VIDEO && isLocalVideoTransmissionEnabled()
&& dev.getDirection().allowsSending())
// sendrecv, and whether video device can capture/send
if ((type == MediaType.VIDEO)
&& (isLocalVideoTransmissionEnabled()
|| isRTPTranslationEnabled())
&& dev.getDirection().allowsSending())
{
direction = MediaDirection.SENDRECV;
ourContent.setSenders(ContentPacketExtension.SendersEnum.both);
@ -503,15 +509,13 @@ public Iterable<ContentPacketExtension> generateSessionAccept()
= JingleUtils.payloadTypeToMediaFormat(
payload,
getDynamicPayloadTypes());
if(format != null)
break;
}
if(format == null)
{
ProtocolProviderServiceJabberImpl.
throwOperationFailedException(
ProtocolProviderServiceJabberImpl.throwOperationFailedException(
"No matching codec.",
OperationFailedException.ILLEGAL_ARGUMENT,
null,
@ -521,8 +525,9 @@ public Iterable<ContentPacketExtension> generateSessionAccept()
//extract the extensions that we are advertising:
// check whether we will be exchanging any RTP extensions.
List<RTPExtension> rtpExtensions
= JingleUtils.extractRTPExtensions(
description, this.getRtpExtensionsRegistry());
= JingleUtils.extractRTPExtensions(
description,
this.getRtpExtensionsRegistry());
Map<String, String> adv = format.getAdvancedAttributes();
if(adv != null)
@ -554,8 +559,15 @@ public Iterable<ContentPacketExtension> generateSessionAccept()
}
// create the corresponding stream...
initStream(ourContent.getName(), connector, dev, format, target,
direction, rtpExtensions, masterStream);
initStream(
ourContent.getName(),
connector,
dev,
format,
target,
direction,
rtpExtensions,
masterStream);
}
return sessAccept;
}
@ -573,11 +585,16 @@ public Iterable<ContentPacketExtension> generateSessionAccept()
private ContentPacketExtension createContent(MediaDevice dev)
throws OperationFailedException
{
MediaDirection direction
= dev.getDirection().and(
getDirectionUserPreference(dev.getMediaType()));
MediaType mediaType = dev.getMediaType();
MediaDirection direction = dev.getDirection();
if(isLocallyOnHold())
/*
* In the case of RTP translation performed by the conference focus,
* the conference focus is not required to capture media.
*/
if (!(MediaType.VIDEO.equals(mediaType) && isRTPTranslationEnabled()))
direction = direction.and(getDirectionUserPreference(mediaType));
if (isLocallyOnHold())
direction = direction.and(MediaDirection.SENDONLY);
QualityPreset sendQualityPreset = null;
@ -585,36 +602,30 @@ private ContentPacketExtension createContent(MediaDevice dev)
if(qualityControls != null)
{
// the one we will send is the one the other part has announced
// as receive
// the one we will send is the one the remote has announced as
// receive
sendQualityPreset = qualityControls.getRemoteReceivePreset();
// the one we want to receive is the setting that remote
// can send
// the one we want to receive is the one the remote can send
receiveQualityPreset = qualityControls.getRemoteSendMaxPreset();
}
if(direction != MediaDirection.INACTIVE)
{
ContentPacketExtension content = createContentForOffer(
dev.getSupportedFormats(sendQualityPreset,
receiveQualityPreset), direction,
dev.getSupportedExtensions());
RtpDescriptionPacketExtension description =
JingleUtils.getRtpDescription(content);
ContentPacketExtension content
= createContentForOffer(
dev.getSupportedFormats(
sendQualityPreset,
receiveQualityPreset),
direction,
dev.getSupportedExtensions());
RtpDescriptionPacketExtension description
= JingleUtils.getRtpDescription(content);
//SDES
// It is important to set SDES before ZRTP in order to make GTALK
// application able to work with SDES.
setSDesEncryptionToDescription(
dev.getMediaType(),
description,
null);
setSDesEncryptionToDescription(mediaType, description, null);
//ZRTP
setZrtpEncryptionToDescription(
dev.getMediaType(),
description,
null);
setZrtpEncryptionToDescription(mediaType, description, null);
return content;
}
@ -657,40 +668,41 @@ public List<ContentPacketExtension> createContentList(MediaType mediaType)
{
MediaDevice dev = getDefaultDevice(mediaType);
List<ContentPacketExtension> mediaDescs
= new ArrayList<ContentPacketExtension>();
= new ArrayList<ContentPacketExtension>();
if (dev != null)
{
ContentPacketExtension content = createContent(dev);
if(content != null)
if (content != null)
mediaDescs.add(content);
}
//fail if all devices were inactive
if(mediaDescs.isEmpty())
// Fail if no media is described (e.g. all devices are inactive).
if (mediaDescs.isEmpty())
{
ProtocolProviderServiceJabberImpl
.throwOperationFailedException(
ProtocolProviderServiceJabberImpl.throwOperationFailedException(
"We couldn't find any active Audio/Video devices and "
+ "couldn't create a call",
OperationFailedException.GENERAL_ERROR, null, logger);
OperationFailedException.GENERAL_ERROR,
null,
logger);
}
TransportInfoSender transportInfoSender =
getTransportManager().getXmlNamespace().equals(
ProtocolProviderServiceJabberImpl.URN_GOOGLE_TRANSPORT_P2P)
// Describe the transport(s).
TransportInfoSender transportInfoSender
= getTransportManager().getXmlNamespace().equals(
ProtocolProviderServiceJabberImpl.URN_GOOGLE_TRANSPORT_P2P)
? new TransportInfoSender()
{
public void sendTransportInfo(
Iterable<ContentPacketExtension> contents)
{
getPeer().sendTransportInfo(contents);
}
}
{
public void sendTransportInfo(
Iterable<ContentPacketExtension> contents)
{
getPeer().sendTransportInfo(contents);
}
}
: null;
//now add the transport elements
return harvestCandidates(null, mediaDescs, transportInfoSender);
}
@ -709,7 +721,7 @@ public void sendTransportInfo(
public List<ContentPacketExtension> createContentList()
throws OperationFailedException
{
//Audio Media Description
// Describe the media.
List<ContentPacketExtension> mediaDescs
= new ArrayList<ContentPacketExtension>();
@ -719,11 +731,19 @@ public List<ContentPacketExtension> createContentList()
if (dev != null)
{
MediaDirection direction
= dev.getDirection().and(
getDirectionUserPreference(mediaType));
MediaDirection direction = dev.getDirection();
if(isLocallyOnHold())
/*
* In the case of RTP translation performed by the conference
* focus, the conference focus is not required to capture media.
*/
if (!(MediaType.VIDEO.equals(mediaType)
&& isRTPTranslationEnabled()))
{
direction
= direction.and(getDirectionUserPreference(mediaType));
}
if (isLocallyOnHold())
direction = direction.and(MediaDirection.SENDONLY);
/*
@ -734,16 +754,15 @@ public List<ContentPacketExtension> createContentList()
if (MediaDirection.RECVONLY.equals(direction))
direction = MediaDirection.INACTIVE;
if(direction != MediaDirection.INACTIVE)
if (direction != MediaDirection.INACTIVE)
{
ContentPacketExtension content
= createContentForOffer(
dev.getSupportedFormats(),
direction,
dev.getSupportedExtensions());
RtpDescriptionPacketExtension description =
JingleUtils.getRtpDescription(content);
RtpDescriptionPacketExtension description
= JingleUtils.getRtpDescription(content);
//SDES
// It is important to set SDES before ZRTP in order to make
@ -752,17 +771,16 @@ public List<ContentPacketExtension> createContentList()
mediaType,
description,
null);
//ZRTP
setZrtpEncryptionToDescription(
mediaType,
description,
null);
/* we request a desktop sharing session so add the inputevt
* extension in the "video" content
*/
if(description.getMedia().equals(MediaType.VIDEO.toString())
// we request a desktop sharing session so add the inputevt
// extension in the "video" content
if (description.getMedia().equals(
MediaType.VIDEO.toString())
&& getLocalInputEvtAware())
{
content.addChildExtension(
@ -774,8 +792,8 @@ && getLocalInputEvtAware())
}
}
//fail if all devices were inactive
if(mediaDescs.isEmpty())
// Fail if no media is described (e.g. all devices are inactive).
if (mediaDescs.isEmpty())
{
ProtocolProviderServiceJabberImpl.throwOperationFailedException(
"We couldn't find any active Audio/Video devices"
@ -785,6 +803,7 @@ && getLocalInputEvtAware())
logger);
}
// Describe the transport(s).
TransportInfoSender transportInfoSender
= getTransportManager().getXmlNamespace().equals(
ProtocolProviderServiceJabberImpl.URN_GOOGLE_TRANSPORT_P2P)
@ -798,7 +817,6 @@ public void sendTransportInfo(
}
: null;
//now add the transport elements
return harvestCandidates(null, mediaDescs, transportInfoSender);
}
@ -849,8 +867,8 @@ private ContentPacketExtension createContentForOffer(
* processing to stop (method setState in CallPeer).
*/
public void reinitAllContents()
throws OperationFailedException,
IllegalArgumentException
throws OperationFailedException,
IllegalArgumentException
{
boolean masterStreamSet = false;
for(String key : remoteContentMap.keySet())
@ -982,15 +1000,17 @@ private void removeContent(
* in this operation can synchronize to the mediaHandler instance to wait
* processing to stop (method setState in CallPeer).
*/
private void processContent(ContentPacketExtension content, boolean modify,
boolean masterStream)
private void processContent(
ContentPacketExtension content,
boolean modify,
boolean masterStream)
throws OperationFailedException,
IllegalArgumentException
{
RtpDescriptionPacketExtension description
= JingleUtils.getRtpDescription(content);
MediaType mediaType
= MediaType.parseString( description.getMedia() );
= MediaType.parseString(description.getMedia());
//stream target
TransportManagerJabberImpl transportManager = getTransportManager();
@ -1480,32 +1500,34 @@ private MediaDirection calculatePostHoldDirection(MediaStream stream)
{
MediaDirection streamDirection = stream.getDirection();
if(streamDirection.allowsSending())
if (streamDirection.allowsSending())
return streamDirection;
//when calculating a direction we need to take into account 1) what
//direction the remote party had asked for before putting us on hold,
//2) what the user preference is for the stream's media type, 3) our
//local hold status, 4) the direction supported by the device this
//stream is reading from.
/*
* When calculating a direction we need to take into account 1) what
* direction the remote party had asked for before putting us on hold,
* 2) what the user preference is for the stream's media type, 3) our
* local hold status, 4) the direction supported by the device this
* stream is reading from.
*/
//1. check what the remote party originally told us (from our persp.)
// 1. what the remote party originally told us (from our perspective)
ContentPacketExtension content = remoteContentMap.get(stream.getName());
MediaDirection postHoldDir
= JingleUtils.getDirection(content, !getPeer().isInitiator());
MediaDirection postHoldDir = JingleUtils.getDirection(content,
!getPeer().isInitiator());
//2. check the user preference.
// 2. the user preference
MediaDevice device = stream.getDevice();
postHoldDir
= postHoldDir.and(
getDirectionUserPreference(device.getMediaType()));
//3. check our local hold status.
if(isLocallyOnHold())
postHoldDir.and(MediaDirection.SENDONLY);
// 3. our local hold status
if (isLocallyOnHold())
postHoldDir = postHoldDir.and(MediaDirection.SENDONLY);
//4. check the device direction.
// 4. the device direction
postHoldDir = postHoldDir.and(device.getDirection());
stream.setDirection(postHoldDir);
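
The direction logic reworked in createContent() and createContentList() above reduces to a single rule: the user's capture preference is skipped for video when the local peer is a conference focus that only translates RTP, and the direction is capped at sending only while locally on hold. Below is a self-contained sketch of that rule with the handler state passed in as plain parameters; MediaDirection and MediaType are assumed to live in org.jitsi.service.neomedia, matching the imports in this diff, and the class/method names are illustrative.

import org.jitsi.service.neomedia.MediaDirection;
import org.jitsi.service.neomedia.MediaType;

final class OfferedDirectionSketch
{
    /**
     * Mirrors the direction computation in createContent()/createContentList():
     * a translating conference focus does not have to capture video itself.
     */
    static MediaDirection offeredDirection(
            MediaType mediaType,
            MediaDirection deviceDirection,
            MediaDirection userPreference,
            boolean rtpTranslationEnabled,
            boolean locallyOnHold)
    {
        MediaDirection direction = deviceDirection;

        // Intersect with the user preference unless the local peer is a
        // conference focus which merely translates RTP for the video.
        if (!(MediaType.VIDEO.equals(mediaType) && rtpTranslationEnabled))
            direction = direction.and(userPreference);

        // While locally on hold, cap the direction at sending only.
        if (locallyOnHold)
            direction = direction.and(MediaDirection.SENDONLY);

        return direction;
    }
}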

@ -40,10 +40,6 @@ public class InfoRetreiver
private static final String TAG_FN_OPEN = "<FN>";
private static final String TAG_FN_CLOSE = "</FN>";
// the uin of the account using us,
// used when sending commands for user info to the server
private final String ownerUin;
/**
* The timeout to wait before considering vcard has time outed.
*/
@ -54,7 +50,6 @@ protected InfoRetreiver(
String ownerUin)
{
this.jabberProvider = jabberProvider;
this.ownerUin = ownerUin;
vcardTimeoutReply
= JabberActivator.getConfigurationService().getLong(

@ -178,7 +178,7 @@ private SmackServiceNode.MappedNodes searchServicesWithPrefix(
for(Map.Entry<String, TrackerEntry> entry
: service.getTrackerEntries().entrySet())
{
service.deepSearch(
SmackServiceNode.deepSearch(
xmppConnection,
maxEntries,
entry.getValue().getJid(),
@ -209,7 +209,7 @@ private SmackServiceNode.MappedNodes searchServicesWithPrefix(
return mappedNodes;
// Request to Server
service.deepSearch(
SmackServiceNode.deepSearch(
xmppConnection,
maxEntries,
xmppConnection.getHost(),
@ -232,7 +232,7 @@ private SmackServiceNode.MappedNodes searchServicesWithPrefix(
final Presence presence = i.next();
if (presence.isAvailable())
{
service.deepSearch(
SmackServiceNode.deepSearch(
xmppConnection,
maxEntries,
presence.getFrom(),
@ -314,7 +314,7 @@ private static boolean searchDiscoItems(
if( !StringUtils.isNullOrEmpty(pref)
&& item.getEntityID().startsWith(pref.trim()))
{
service.deepSearch(
SmackServiceNode.deepSearch(
xmppConnection,
maxEntries,
item.getEntityID(),
@ -339,7 +339,7 @@ private static boolean searchDiscoItems(
// we may searched already this node if it starts
// with some of the prefixes
if(!visited.containsKey(item.getEntityID()))
service.deepSearch(
SmackServiceNode.deepSearch(
xmppConnection,
maxEntries,
item.getEntityID(),

@ -277,9 +277,9 @@ public Call createCall(Contact callee)
boolean isGoogle = protocolProvider.isGmailOrGoogleAppsAccount();
boolean isGoogleVoice = false;
if(isGoogle)
if (isGoogle)
{
if(!calleeAddress.contains("@"))
if (!calleeAddress.contains("@"))
{
calleeAddress += "@" + GOOGLE_VOICE_DOMAIN;
isGoogleVoice = true;
@ -293,34 +293,30 @@ else if(calleeAddress.endsWith(GOOGLE_VOICE_DOMAIN))
// if address is not suffixed by @domain, add the default domain
// corresponding to account domain or via the OVERRIDE_PHONE_SUFFIX
// property if defined
if(calleeAddress.indexOf('@') == -1)
AccountID accountID = getProtocolProvider().getAccountID();
if (calleeAddress.indexOf('@') == -1)
{
String phoneSuffix
= accountID.getAccountPropertyString("OVERRIDE_PHONE_SUFFIX");
String serviceName = null;
String phoneSuffix =
(String)getProtocolProvider().getAccountID().getAccountProperty(
"OVERRIDE_PHONE_SUFFIX");
if(phoneSuffix == null || phoneSuffix.length() == 0)
{
serviceName = "@" + StringUtils.parseServer(
getProtocolProvider().getAccountID().getUserID());
}
if ((phoneSuffix == null) || (phoneSuffix.length() == 0))
serviceName = StringUtils.parseServer(accountID.getUserID());
else
{
serviceName = "@" + phoneSuffix;
}
calleeAddress += serviceName;
serviceName = phoneSuffix;
calleeAddress = calleeAddress + "@" + serviceName;
}
AccountID accountID = getProtocolProvider().getAccountID();
String bypassDomain = accountID.getAccountPropertyString(
"TELEPHONY_BYPASS_GTALK_CAPS");
boolean alwaysCallGtalk = ((bypassDomain != null) &&
bypassDomain.equals(calleeAddress.substring(
calleeAddress.indexOf('@') + 1))) || isGoogleVoice;
boolean alwaysCallGtalk
= ((bypassDomain != null)
&& bypassDomain.equals(
calleeAddress.substring(
calleeAddress.indexOf('@') + 1)))
|| isGoogleVoice;
// we determine on which resource the remote user is connected if the
// resource isn't already provided
@ -337,9 +333,9 @@ else if(calleeAddress.endsWith(GOOGLE_VOICE_DOMAIN))
OperationFailedException.NOT_FOUND);
}
Iterator<Presence> it =
getProtocolProvider().getConnection().getRoster().getPresences(
calleeAddress);
Iterator<Presence> it
= getProtocolProvider().getConnection().getRoster().getPresences(
calleeAddress);
String calleeURI = null;
boolean isGingle = false;
@ -352,17 +348,19 @@ else if(calleeAddress.endsWith(GOOGLE_VOICE_DOMAIN))
while(it.hasNext())
{
Presence presence = it.next();
int priority = (presence.getPriority() == Integer.MIN_VALUE) ? 0 :
presence.getPriority();
int priority
= (presence.getPriority() == Integer.MIN_VALUE)
? 0
: presence.getPriority();
calleeURI = presence.getFrom();
DiscoverInfo discoverInfo = null;
try
{
// check if the remote client supports telephony.
discoverInfo =
protocolProvider.getDiscoveryManager().
discoverInfo(calleeURI);
discoverInfo
= protocolProvider.getDiscoveryManager().discoverInfo(
calleeURI);
}
catch (XMPPException ex)
{
@ -489,7 +487,7 @@ else if(di != null)
fullCalleeURI,
sessionInitiateExtensions);
}
else if(di != null)
else if (di != null)
{
peer
= call.initiateSession(
@ -498,7 +496,7 @@ else if(di != null)
sessionInitiateExtensions);
}
}
catch(Throwable t)
catch (Throwable t)
{
/*
* The Javadoc on ThreadDeath says: If ThreadDeath is caught by a
@ -626,7 +624,7 @@ public synchronized void hangupCallPeer(CallPeer peer)
throws ClassCastException,
OperationFailedException
{
this.hangupCallPeer(peer, HANGUP_REASON_NORMAL_CLEARING, null);
hangupCallPeer(peer, HANGUP_REASON_NORMAL_CLEARING, null);
}
/**
@ -649,14 +647,15 @@ public void hangupCallPeer(CallPeer peer,
// if we are failing a peer and have a reason, add the reason packet
// extension
ReasonPacketExtension reasonPacketExt = null;
if(failed && reasonText != null)
if (failed && (reasonText != null))
{
Reason reason = convertReasonCodeToSIPCode(reasonCode);
if(reason != null)
if (reason != null)
{
reasonPacketExt = new ReasonPacketExtension(
reason, reasonText, null);
reasonPacketExt
= new ReasonPacketExtension(reason, reasonText, null);
}
}
@ -892,10 +891,6 @@ else if(packet instanceof SessionIQ)
*/
public void processPacket(Packet packet)
{
//this is not supposed to happen because of the filter ... but still
if (! (packet instanceof JingleIQ) && !(packet instanceof SessionIQ))
return;
if(packet instanceof JingleIQ)
{
JingleIQ jingleIQ = (JingleIQ)packet;
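
In the createCall() hunk near the top of this file, a callee address without an "@domain" part is completed with either the OVERRIDE_PHONE_SUFFIX account property or the server part of the local account's user ID. A sketch of that rule as a pure function, assuming Smack's org.jivesoftware.smack.util.StringUtils.parseServer as used in the hunk; the Google Voice handling that precedes it in the diff is left out, and the names here are illustrative:

import org.jivesoftware.smack.util.StringUtils;

final class CalleeAddressSketch
{
    /**
     * Completes a callee address which lacks a domain part.
     *
     * @param calleeAddress the address entered by the user
     * @param overridePhoneSuffix the OVERRIDE_PHONE_SUFFIX account property
     * (may be null or empty)
     * @param accountUserId the full user ID of the local account
     * @return the address with a domain part appended if it had none
     */
    static String completeCalleeAddress(
            String calleeAddress,
            String overridePhoneSuffix,
            String accountUserId)
    {
        if (calleeAddress.indexOf('@') != -1)
            return calleeAddress;

        String serviceName
            = ((overridePhoneSuffix == null)
                    || (overridePhoneSuffix.length() == 0))
                ? StringUtils.parseServer(accountUserId)
                : overridePhoneSuffix;

        return calleeAddress + "@" + serviceName;
    }
}

Under these assumptions, completeCalleeAddress("12025550100", null, "alice@example.com") yields "12025550100@example.com", while an address that already contains "@" is returned unchanged.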

@ -120,8 +120,8 @@ public OperationSetDesktopSharingServerJabberImpl(
public Call createVideoCall(String uri, MediaDevice device)
throws OperationFailedException, ParseException
{
MediaAwareCall call
= (MediaAwareCall) super.createVideoCall(uri, device);
MediaAwareCall<?,?,?> call
= (MediaAwareCall<?,?,?>) super.createVideoCall(uri, device);
size
= (((VideoMediaFormat)
@ -149,8 +149,8 @@ public Call createVideoCall(String uri, MediaDevice device)
public Call createVideoCall(Contact callee, MediaDevice device)
throws OperationFailedException
{
MediaAwareCall call
= (MediaAwareCall) super.createVideoCall(callee, device);
MediaAwareCall<?,?,?> call
= (MediaAwareCall<?,?,?>) super.createVideoCall(callee, device);
size
= ((VideoMediaFormat)
@ -226,8 +226,10 @@ protected Call createOutgoingVideoCall(
else
{
if (logger.isInfoEnabled())
logger.info(fullCalleeURI +
": remote-control not supported!");
{
logger.info(
fullCalleeURI + ": remote-control not supported!");
}
// TODO fail or not?
/*
@ -262,8 +264,7 @@ protected Call createOutgoingVideoCall(
basicTelephony.createOutgoingCall(call, calleeAddress);
CallPeerJabberImpl callPeer
= new CallPeerJabberImpl(calleeAddress, call);
new CallPeerJabberImpl(calleeAddress, call);
return call;
}
@ -307,7 +308,7 @@ public void setLocalVideoAllowed(Call call,
boolean allowed)
throws OperationFailedException
{
((AbstractCallJabberGTalkImpl) call).setLocalInputEvtAware(allowed);
((AbstractCallJabberGTalkImpl<?>) call).setLocalInputEvtAware(allowed);
super.setLocalVideoAllowed(call, mediaDevice, allowed);
}
@ -654,11 +655,13 @@ private void receivedResponseToIqStop(CallPeer callPeer, Packet p)
public boolean isRemoteControlAvailable(CallPeer callPeer)
{
DiscoverInfo discoverInfo
= ((AbstractCallPeerJabberGTalkImpl) callPeer).getDiscoverInfo();
return (this.parentProvider.getDiscoveryManager()
.includesFeature(InputEvtIQ.NAMESPACE_SERVER)
&& discoverInfo != null
&& discoverInfo.containsFeature(
InputEvtIQ.NAMESPACE_CLIENT));
= ((AbstractCallPeerJabberGTalkImpl<?,?>) callPeer)
.getDiscoverInfo();
return
parentProvider.getDiscoveryManager().includesFeature(
InputEvtIQ.NAMESPACE_SERVER)
&& (discoverInfo != null)
&& discoverInfo.containsFeature(InputEvtIQ.NAMESPACE_CLIENT);
}
}

@ -122,7 +122,7 @@ public Call createVideoCall(String uri)
{
Call call = createOutgoingVideoCall(uri);
MediaDevice device
= ((MediaAwareCall) call).getDefaultDevice(MediaType.VIDEO);
= ((MediaAwareCall<?,?,?>) call).getDefaultDevice(MediaType.VIDEO);
size = (((VideoMediaFormat)device.getFormat()).getSize());
origin = getOriginForMediaDevice(device);
return call;
@ -146,7 +146,7 @@ public Call createVideoCall(Contact callee) throws OperationFailedException
{
Call call = createOutgoingVideoCall(callee.getAddress());
MediaDevice device
= ((MediaAwareCall) call).getDefaultDevice(MediaType.VIDEO);
= ((MediaAwareCall<?,?,?>) call).getDefaultDevice(MediaType.VIDEO);
size = (((VideoMediaFormat)device.getFormat()).getSize());
origin = getOriginForMediaDevice(device);
return call;
@ -192,8 +192,8 @@ public void setLocalVideoAllowed(Call call,
boolean allowed)
throws OperationFailedException
{
AbstractCallJabberGTalkImpl callImpl
= (AbstractCallJabberGTalkImpl) call;
AbstractCallJabberGTalkImpl<?> callImpl
= (AbstractCallJabberGTalkImpl<?>) call;
if (mediaDevice == null)
{
@ -228,8 +228,9 @@ public void setLocalVideoAllowed(Call call,
*/
public boolean isLocalVideoAllowed(Call call)
{
return ((MediaAwareCall<?, ?, ?>)call).
isLocalVideoAllowed(MediaUseCase.DESKTOP);
return
((MediaAwareCall<?, ?, ?>)call).isLocalVideoAllowed(
MediaUseCase.DESKTOP);
}
/**
@ -275,16 +276,13 @@ protected Call createOutgoingVideoCall(String calleeAddress,
*/
public boolean isPartialStreaming(Call call)
{
MediaAwareCall callImpl = (MediaAwareCall)call;
MediaAwareCall<?,?,?> callImpl = (MediaAwareCall<?,?,?>) call;
MediaDevice device = callImpl.getDefaultDevice(MediaType.VIDEO);
if(device != null)
{
MediaService mediaService = JabberActivator.getMediaService();
return mediaService.isPartialStreaming(device);
}
return false;
return
(device == null)
? false
: JabberActivator.getMediaService().isPartialStreaming(device);
}
/**
@ -297,13 +295,14 @@ public boolean isPartialStreaming(Call call)
*/
public void movePartialDesktopStreaming(Call call, int x, int y)
{
AbstractCallJabberGTalkImpl callImpl
= (AbstractCallJabberGTalkImpl) call;
AbstractCallPeerJabberGTalkImpl callPeerImpl
= (AbstractCallPeerJabberGTalkImpl) callImpl.getCallPeers().next();
VideoMediaStream videoStream = (VideoMediaStream)
callPeerImpl.getMediaHandler().getStream(
MediaType.VIDEO);
AbstractCallJabberGTalkImpl<?> callImpl
= (AbstractCallJabberGTalkImpl<?>) call;
AbstractCallPeerJabberGTalkImpl<?,?> callPeerImpl
= (AbstractCallPeerJabberGTalkImpl<?,?>)
callImpl.getCallPeers().next();
VideoMediaStream videoStream
= (VideoMediaStream)
callPeerImpl.getMediaHandler().getStream(MediaType.VIDEO);
if(videoStream != null)
{

@ -462,15 +462,16 @@ protected CallPeer inviteCalleeToCall(
if (!wasConferenceFocus && call.isConferenceFocus())
{
/*
* Re-INVITE existing CallPeers to inform them that from now
* the specified call is a conference call.
* Re-INVITE existing CallPeers to inform them that from now the
* specified call is a conference call.
*/
Iterator<CallPeerJabberImpl> callPeerIter = call.getCallPeers();
while (callPeerIter.hasNext())
{
CallPeerJabberImpl callPeer = callPeerIter.next();
if(callPeer.getState() == CallPeerState.CONNECTED)
if (callPeer.getState() == CallPeerState.CONNECTED)
callPeer.sendCoinSessionInfo(true);
}
}
@ -551,31 +552,27 @@ public boolean accept(Packet packet)
*/
public void processPacket(Packet packet)
{
CoinIQ coinIQ = (CoinIQ)packet;
CoinIQ coinIQ = (CoinIQ) packet;
//first ack all "set" requests.
if(coinIQ.getType() == IQ.Type.SET)
if (coinIQ.getType() == IQ.Type.SET)
{
IQ ack = IQ.createResultIQ(coinIQ);
parentProvider.getConnection().sendPacket(ack);
}
String sid = coinIQ.getSID();
if(sid == null)
if (sid != null)
{
return;
}
CallPeerJabberImpl callPeer
= getBasicTelephony().getActiveCallsRepository().findCallPeer(
sid);
CallPeerJabberImpl callPeer
= getBasicTelephony().getActiveCallsRepository().findCallPeer(sid);
if(callPeer == null)
{
return;
if (callPeer != null)
handleCoin(coinIQ, callPeer);
}
handleCoin(coinIQ, callPeer);
}
/**

@ -390,9 +390,9 @@ public void load()
* @param provider the IQ provider class.
*/
private void addProvider(
String elementName,
String namespace,
Class provider)
String elementName,
String namespace,
Class<?> provider)
{
// Attempt to load the provider class and then create
// a new instance if it's an IQProvider. Otherwise, if it's
@ -425,8 +425,10 @@ else if (IQ.class.isAssignableFrom(provider))
* @param namespace the XML namespace.
* @param provider the extension provider class.
*/
public void addExtProvider(String elementName, String namespace,
Class provider)
public void addExtProvider(
String elementName,
String namespace,
Class<?> provider)
{
// Attempt to load the provider class and then create
// a new instance if it's a Provider. Otherwise, if it's
@ -436,11 +438,12 @@ public void addExtProvider(String elementName, String namespace,
try
{
// Add the provider to the map.
if (PacketExtensionProvider.class.isAssignableFrom(
provider))
if (PacketExtensionProvider.class.isAssignableFrom(provider))
{
addExtensionProvider(
elementName, namespace, provider.newInstance());
elementName,
namespace,
provider.newInstance());
}
else if (PacketExtension.class.isAssignableFrom(
provider))

@ -42,47 +42,21 @@ public interface OperationSetVideoTelephony
public void addVideoListener( CallPeer peer, VideoListener listener);
/**
* Creates a visual <tt>Component</tt> which depicts the local video
* being streamed to a specific <tt>CallPeer</tt>. The returned
* visual <tt>Component</tt> should be disposed when it is no longer
* required through {@link #disposeLocalVisualComponent(CallPeer,
* Component) disposeLocalVisualComponent}.
* Gets the visual <tt>Component</tt> which depicts the local video
* being streamed to a specific <tt>CallPeer</tt>.
*
* @param peer the <tt>CallPeer</tt> to whom the local video which is to be
* depicted by the returned visual <tt>Component</tt> is being streamed
* @param listener if not <tt>null</tt>, a <tt>VideoListener</tt> to track
* the progress of the creation in case this telephony chooses to perform it
* asynchronously and to not return the created visual <tt>Component</tt>
* immediately/as the result of this method call
*
* @return a visual <tt>Component</tt> which depicts the local video being
* streamed to the specified <tt>CallPeer</tt> if this telephony chooses to
* carry out the creation synchronously; <tt>null</tt> if this telephony
* chooses to create the requested visual <tt>Component</tt> asynchronously.
*
* chooses to create the requested visual <tt>Component</tt> asynchronously
* @throws OperationFailedException if creating the component fails for
* whatever reason.
*/
public Component createLocalVisualComponent(CallPeer peer,
VideoListener listener)
public Component getLocalVisualComponent(CallPeer peer)
throws OperationFailedException;
/**
* Disposes of a visual <tt>Component</tt> depicting the local video for
* a specific <tt>CallPeer</tt> (previously obtained through
* {@link #createLocalVisualComponent(CallPeer, VideoListener)
* createLocalVisualComponent}).
* The disposal may include, but is not limited to, releasing the
* <tt>Player</tt> which provides the <tt>component</tt> and renders
* the local video into it, disconnecting from the video capture device.
*
* @param peer the <tt>CallPeer</tt> for whom the visual <tt>Component</tt>
* depicts the local video
* @param component the visual <tt>Component</tt> depicting the local video
* to be disposed
*/
public void disposeLocalVisualComponent(CallPeer peer, Component component);
/**
* Gets the visual/video <tt>Component</tt> available in this telephony for
* a specific <tt>CallPeer</tt>.

@ -93,37 +93,17 @@ public void addVideoListener(CallPeer peer, VideoListener listener)
/**
* Implements
* {@link OperationSetVideoTelephony#createLocalVisualComponent(CallPeer,
* VideoListener)}.
* {@link OperationSetVideoTelephony#createLocalVisualComponent(CallPeer)}.
*
* @param peer the <tt>CallPeer</tt> that we are sending our local video to.
* @param listener the <tt>VideoListener</tt> where we'd like to retrieve
* the <tt>Component</tt> containing the local video.
* @return the <tt>Component</tt> containing the local video.
* @throws OperationFailedException if we fail extracting the local video.
*/
@SuppressWarnings("unchecked") // work with MediaAware* in media package
public Component createLocalVisualComponent(
CallPeer peer,
VideoListener listener)
public Component getLocalVisualComponent(CallPeer peer)
throws OperationFailedException
{
return ((W)peer).getMediaHandler().createLocalVisualComponent();
}
/**
* Implements
* {@link OperationSetVideoTelephony#disposeLocalVisualComponent(CallPeer,
* Component)}.
*
* @param peer the <tt>CallPeer</tt> whose local video component we'd like
* to dispose of.
* @param component the <tt>Component</tt> that we'll be disposing of.
*/
@SuppressWarnings("unchecked") // work with MediaAware* in media package
public void disposeLocalVisualComponent(CallPeer peer, Component component)
{
((W)peer).getMediaHandler().disposeLocalVisualComponent(component);
return ((W)peer).getMediaHandler().getLocalVisualComponent();
}
/**

@ -504,11 +504,12 @@ public void setLocalVideoTransmissionEnabled(boolean enabled)
MediaDirection newValue = videoDirectionUserPreference;
/* we do not send an event here if video is enabled because we have to
* wait video stream starts to have correct MediaDevice set in
* VideoMediaDeviceSession
/*
* Do not send an event here if the local video is enabled because the
* video stream needs to start before the correct MediaDevice is set in
* VideoMediaDeviceSession.
*/
if(!enabled)
if (!enabled)
{
firePropertyChange(
OperationSetVideoTelephony.LOCAL_VIDEO_STREAMING,
@ -709,48 +710,21 @@ public void fireVideoEvent(VideoEvent event)
}
/**
* Gets local visual <tt>Component</tt> of the local peer.
* Gets the visual <tt>Component</tt>, if any, depicting the video streamed
* from the local peer to the remote peer.
*
* @return visual <tt>Component</tt>
* @return the visual <tt>Component</tt> depicting the local video if local
* video is actually being streamed from the local peer to the remote peer;
* otherwise, <tt>null</tt>
*/
public Component createLocalVisualComponent()
public Component getLocalVisualComponent()
{
boolean flipLocalVideoDisplay = true;
OperationSetDesktopSharingServer desktopOpSet
= peer.getCall().getProtocolProvider().getOperationSet(
OperationSetDesktopSharingServer.class);
// If the call video is a desktop sharing stream, then do not flip the
// local video display.
if (desktopOpSet != null
&& desktopOpSet.isLocalVideoAllowed(peer.getCall()))
{
flipLocalVideoDisplay = false;
}
MediaStream videoStream = getStream(MediaType.VIDEO);
return
((videoStream == null) || !isLocalVideoTransmissionEnabled())
? null
: ((VideoMediaStream) videoStream).createLocalVisualComponent(
flipLocalVideoDisplay);
}
/**
* Disposes of a specific local visual <tt>Component</tt> of the local peer.
*
* @param component the local visual <tt>Component</tt> of the local peer to
* dispose of
*/
public void disposeLocalVisualComponent(Component component)
{
MediaStream videoStream = getStream(MediaType.VIDEO);
if (videoStream != null)
{
((VideoMediaStream) videoStream).disposeLocalVisualComponent(
component);
}
: ((VideoMediaStream) videoStream).getLocalVisualComponent();
}
/**
@ -919,15 +893,31 @@ protected Map<MediaTypeSrtpControl, SrtpControl> getSrtpControls()
* @throws OperationFailedException if creating the stream fails for any
* reason (like, for example, accessing the device or setting the format).
*/
protected MediaStream initStream(StreamConnector connector,
MediaDevice device,
MediaFormat format,
MediaStreamTarget target,
MediaDirection direction,
List<RTPExtension> rtpExtensions,
boolean masterStream)
protected MediaStream initStream(StreamConnector connector,
MediaDevice device,
MediaFormat format,
MediaStreamTarget target,
MediaDirection direction,
List<RTPExtension> rtpExtensions,
boolean masterStream)
throws OperationFailedException
{
MediaType mediaType = device.getMediaType();
/*
* Do make sure that no unintentional streaming of media generated by
* the user without prior consent will happen.
*/
direction = direction.and(getDirectionUserPreference(mediaType));
if (device != null)
{
/*
* If the device does not support a direction, there is really
* nothing to be done at this point to make it use it.
*/
direction = direction.and(device.getDirection());
}
MediaStream stream
= mediaHandler.initStream(
this,
@ -939,7 +929,7 @@ protected MediaStream initStream(StreamConnector connector,
rtpExtensions,
masterStream);
switch (device.getMediaType())
switch (mediaType)
{
case AUDIO:
audioStream = (AudioMediaStream) stream;
@ -1310,8 +1300,7 @@ && isLocalAudioTransmissionEnabled())
*/
firePropertyChange(
OperationSetVideoTelephony.LOCAL_VIDEO_STREAMING,
null,
this.videoDirectionUserPreference);
null, videoDirectionUserPreference);
if(!stream.isStarted())
{
@ -1586,4 +1575,41 @@ public void fireVisualComponentResolveEvent(
conferenceMember));
}
}
/**
* Determines whether RTP translation is enabled for the <tt>CallPeer</tt>
* represented by this <tt>CallPeerMediaHandler</tt>.
* <p>
* For the sake of simplicity at the time of this writing, the current
* implementation presumes the <tt>MediaType</tt> is <tt>VIDEO</tt>.
* </p>
*
* @return <tt>true</tt> if RTP translation is enabled for the
* <tt>CallPeer</tt> represented by this <tt>CallPeerMediaHandler</tt>;
* otherwise, <tt>false</tt>
*/
public boolean isRTPTranslationEnabled()
{
T peer = getPeer();
MediaAwareCall<?,?,?> call = peer.getCall();
if ((call != null)
&& call.isConferenceFocus()
&& !call.isLocalVideoStreaming())
{
Iterator<?> callPeerIt = call.getCallPeers();
while (callPeerIt.hasNext())
{
MediaAwareCallPeer<?,?,?> callPeer
= (MediaAwareCallPeer<?,?,?>) callPeerIt.next();
MediaStream videoMediaStream
= callPeer.getMediaHandler().getStream(MediaType.VIDEO);
if (videoMediaStream != null)
return true;
}
}
return false;
}
}

@ -205,7 +205,7 @@ protected void addCallPeer(T callPeer)
if(getCallPeersVector().isEmpty())
{
callPeer.getMediaHandler().setLocalUserAudioLevelListener(
localAudioLevelDelegator);
localAudioLevelDelegator);
}
}
