Fixes MediaServiceImpl.getSupportedVideoEncodings() which used to return MediaControl.getSupportedAudioEncodings(). (Also makes tiny performance improvements, such as preventing double hashtable look-ups and avoiding unoptimized conversions from List<String> to String[].)

cusax-fix
Lyubomir Marinov 18 years ago
parent 3f31a62536
commit 6d1b39b045

@ -1361,15 +1361,16 @@ private Vector createMediaDescriptions(
throws SdpException
,MediaException
{
//supported audio formats.
String[] supportedAudioEncodings = mediaServCallback
.getMediaControl(getCall())
.getSupportedAudioEncodings();
MediaControl mediaControl =
mediaServCallback.getMediaControl(getCall());
//supported video formats
String[] supportedVideoEncodings = mediaServCallback
.getMediaControl(getCall())
.getSupportedVideoEncodings();
// supported audio formats.
String[] supportedAudioEncodings =
mediaControl.getSupportedAudioEncodings();
// supported video formats
String[] supportedVideoEncodings =
mediaControl.getSupportedVideoEncodings();
//if there was an offer extract the offered media formats and use
//the intersection between the formats we support and those in the
@ -1523,15 +1524,16 @@ private Hashtable<String, List<String>> intersectMediaEncodings(
Hashtable<String, List<String>> offeredEncodings)
throws MediaException
{
//audio encodings supported by the media controller
String[] supportedAudioEncodings = mediaServCallback
.getMediaControl(getCall())
.getSupportedAudioEncodings();
//video encodings supported by the media controller
String[] supportedVideoEncodings = mediaServCallback
.getMediaControl(getCall())
.getSupportedVideoEncodings();
MediaControl mediaControl =
mediaServCallback.getMediaControl(getCall());
// audio encodings supported by the media controller
String[] supportedAudioEncodings =
mediaControl.getSupportedAudioEncodings();
// video encodings supported by the media controller
String[] supportedVideoEncodings =
mediaControl.getSupportedVideoEncodings();
//audio encodings offered by the remote party
List offeredAudioEncodings = (List)offeredEncodings.get("audio");

@ -517,7 +517,7 @@ private void initProcessor(DataSource dataSource)
}
// 1. Changing buffer size. The default buffer size (for javasound)
// is 125 milliseconds - 1/8 sec. On MacOS this leeds to exception and
// is 125 milliseconds - 1/8 sec. On MacOS this leads to exception and
// no audio capture. 30 value of buffer fix the problem and is ok
// when using some pstn gateways
// 2. Changing to 60. When it is 30 there are some issues
@ -581,8 +581,8 @@ private void initProcessor(DataSource dataSource)
//supported formats arrays.
TrackControl[] trackControls = sourceProcessor.getTrackControls();
logger.debug("We will be able to transmit in:");
List transmittableAudioEncodings = new ArrayList();
List transmittableVideoEncodings = new ArrayList();
List<String> transmittableAudioEncodings = new ArrayList<String>();
List<String> transmittableVideoEncodings = new ArrayList<String>();
for (int i = 0; i < trackControls.length; i++)
{
@ -591,7 +591,7 @@ private void initProcessor(DataSource dataSource)
{
Format format = formats[j];
String encoding = format.getEncoding();
int sdpInt = MediaUtils.jmfToSdpEncoding(encoding);
if (sdpInt != MediaUtils.UNKNOWN_ENCODING)
{
@ -604,20 +604,20 @@ private void initProcessor(DataSource dataSource)
{
if (logger.isDebugEnabled())
{
logger.debug("Audio=[" + (j + 1) + "]=" +
encoding + "; sdp=" + sdp);
logger.debug("Audio=[" + (j + 1) + "]="
+ encoding + "; sdp=" + sdp);
}
transmittableAudioEncodings.add(sdp);
}
}
if (format instanceof VideoFormat)
else if (format instanceof VideoFormat)
{
if (!transmittableVideoEncodings.contains(sdp))
{
if (logger.isDebugEnabled())
{
logger.debug("Video=[" + (j + 1) + "]=" +
encoding + "; sdp=" + sdp);
logger.debug("Video=[" + (j + 1) + "]="
+ encoding + "; sdp=" + sdp);
}
transmittableVideoEncodings.add(sdp);
}
@ -632,20 +632,16 @@ private void initProcessor(DataSource dataSource)
//now update the supported encodings arrays.
if(transmittableAudioEncodings.size() > 0)
final int transmittableAudioEncodingCount =
transmittableAudioEncodings.size();
if (transmittableAudioEncodingCount > 0)
{
supportedAudioEncodings
= new String[transmittableAudioEncodings.size()];
for (int i = 0; i < supportedAudioEncodings.length; i++)
{
supportedAudioEncodings[i]
= (String) transmittableAudioEncodings.get(i);
}
supportedAudioEncodings =
transmittableAudioEncodings
.toArray(new String[transmittableAudioEncodingCount]);
//sort the supported encodings according to user preferences.
// sort the supported encodings according to user preferences.
this.sortEncodingsArray(supportedAudioEncodings);
}
//else
{
@ -654,18 +650,15 @@ private void initProcessor(DataSource dataSource)
//everything.
}
if(transmittableVideoEncodings.size() > 0)
final int transmittableVideoEncodingCount =
transmittableVideoEncodings.size();
if (transmittableVideoEncodingCount > 0)
{
supportedVideoEncodings
= new String[transmittableVideoEncodings.size()];
for (int i = 0; i < supportedVideoEncodings.length; i++)
{
supportedVideoEncodings[i]
= (String) transmittableVideoEncodings.get(i);
}
supportedVideoEncodings =
transmittableVideoEncodings
.toArray(new String[transmittableVideoEncodingCount]);
//sort the supported encodings according to user preferences.
// sort the supported encodings according to user preferences.
this.sortEncodingsArray(supportedVideoEncodings);
}
//else
@ -677,7 +670,7 @@ private void initProcessor(DataSource dataSource)
}
/**
* Closes all curently used capture devices and data sources so that they
* Closes all currently used capture devices and data sources so that they
* would be usable by other applications.
*
* @throws MediaException if closing the devices fails with an IO

@ -130,7 +130,7 @@ public String[] getSupportedAudioEncodings()
*/
public String[] getSupportedVideoEncodings()
{
return defaultMediaControl.getSupportedAudioEncodings();
return defaultMediaControl.getSupportedVideoEncodings();
}

Loading…
Cancel
Save