Builds a neomedia bundle and provides the initial implementation of .service.neomedia.

cusax-fix
Lyubomir Marinov 16 years ago
parent cb33984f35
commit b28c62f5da
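For orientation, the sketch below is a hypothetical client fragment (not part of this commit) showing how a bundle might use the new service.neomedia API once this bundle is active: it looks up the registered MediaService, asks for the default audio device and wires up a stream. The bundleContext, connector (a StreamConnector) and target (a MediaStreamTarget) variables are assumed to be created elsewhere.

ServiceReference mediaRef
    = bundleContext.getServiceReference(MediaService.class.getName());
MediaService mediaService = (MediaService) bundleContext.getService(mediaRef);

MediaDevice audioDevice = mediaService.getDefaultDevice(MediaType.AUDIO);
MediaStream audioStream = mediaService.createMediaStream(connector, audioDevice);
audioStream.setTarget(target); // the remote RTP and RTCP addresses
audioStream.start();           // starts capture, sending and playback
// ...
audioStream.stop();
audioStream.close();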

@ -786,7 +786,7 @@
bundle-callhistory, bundle-callhistory-slick, bundle-popupmessagehandler-slick,
bundle-netaddr,bundle-netaddr-slick,bundle-slickless,
bundle-slick-runner,bundle-sip,bundle-sip-slick,bundle-fileaccess,
bundle-fileaccess-slick,bundle-media,bundle-media-slick,
bundle-fileaccess-slick,bundle-media,bundle-neomedia,bundle-media-slick,
bundle-resource-manager,bundle-resources-defaultpack,
bundle-protocol,bundle-icq,bundle-icq-slick,bundle-mock,bundle-smacklib,
bundle-jabber,bundle-jabber-slick,bundle-swing-ui,bundle-ui-service,
@ -1033,6 +1033,109 @@
</target>
<!--BUNDLE-NEOMEDIA-->
<target name="bundle-neomedia">
<!-- Creates a bundle containing the impl of the neomedia package and
the win jmf implementation.-->
<jar
compress="false" destfile="${bundles.dest.win}/neomedia.jar"
manifest="${src}/net/java/sip/communicator/impl/neomedia/neomedia.manifest.mf">
<zipfileset dir="${dest}/net/java/sip/communicator/service/neomedia"
prefix="net/java/sip/communicator/service/neomedia"/>
<zipfileset dir="${dest}/net/java/sip/communicator/impl/neomedia"
prefix="net/java/sip/communicator/impl/neomedia"/>
<zipfileset dir="${dest}/net/java/sip/communicator/impl/media"
includes="ArrayIOUtils*"
prefix="net/java/sip/communicator/impl/media"/>
<zipfileset dir="${dest}/net/java/sip/communicator/impl/media/codec"
includes="Constants*"
prefix="net/java/sip/communicator/impl/media/codec"/>
<zipfileset dir="${dest}/net/java/sip/communicator/impl/media/codec/audio"
prefix="net/java/sip/communicator/impl/media/codec/audio"/>
<zipfileset dir="${dest}/net/java/sip/communicator/impl/media/codec/video"
prefix="net/java/sip/communicator/impl/media/codec/video"/>
<zipfileset dir="${dest}/net/java/sip/communicator/impl/media/protocol"
prefix="net/java/sip/communicator/impl/media/protocol"/>
<zipfileset dir="${dest}/net/java/sip/communicator/impl/media/renderer"
prefix="net/java/sip/communicator/impl/media/renderer"/>
<zipfileset dir="${resources}/images/impl/media"
prefix="resources/images/impl/media"/>
<zipfileset src="${lib.win.noinst}/jmf.jar" prefix=""/>
<zipfileset src="${lib.win.noinst}/sound.jar" prefix=""/>
<zipfileset src="${lib.noinst}/fmj.jar" prefix=""/>
<zipfileset src="${lib.noinst}/jain-sdp.jar" prefix=""/>
<zipfileset src="${lib.noinst}/jspeex.jar" prefix=""/>
<zipfileset src="${lib.noinst}/lti-civil-no_s_w_t.jar" prefix=""/>
</jar>
<!-- Creates a bundle containing the impl of the neomedia package and
the linux jmf implementation.-->
<jar
compress="false" destfile="${bundles.dest.lin}/neomedia.jar"
manifest="${src}/net/java/sip/communicator/impl/neomedia/neomedia.manifest.mf">
<zipfileset dir="${dest}/net/java/sip/communicator/service/neomedia"
prefix="net/java/sip/communicator/service/neomedia"/>
<zipfileset dir="${dest}/net/java/sip/communicator/impl/neomedia"
prefix="net/java/sip/communicator/impl/neomedia"/>
<zipfileset dir="${dest}/net/java/sip/communicator/impl/media"
includes="ArrayIOUtils*"
prefix="net/java/sip/communicator/impl/media"/>
<zipfileset dir="${dest}/net/java/sip/communicator/impl/media/codec"
includes="Constants*"
prefix="net/java/sip/communicator/impl/media/codec"/>
<zipfileset dir="${dest}/net/java/sip/communicator/impl/media/codec/audio"
prefix="net/java/sip/communicator/impl/media/codec/audio"/>
<zipfileset dir="${dest}/net/java/sip/communicator/impl/media/codec/video"
prefix="net/java/sip/communicator/impl/media/codec/video"/>
<zipfileset dir="${dest}/net/java/sip/communicator/impl/media/protocol"
prefix="net/java/sip/communicator/impl/media/protocol"/>
<zipfileset dir="${dest}/net/java/sip/communicator/impl/media/renderer"
prefix="net/java/sip/communicator/impl/media/renderer"/>
<zipfileset dir="${resources}/images/impl/media"
prefix="resources/images/impl/media"/>
<zipfileset src="${lib.lin.noinst}/jmf.jar" prefix=""/>
<zipfileset src="${lib.noinst}/fmj.jar" prefix=""/>
<zipfileset src="${lib.noinst}/jain-sdp.jar" prefix=""/>
<zipfileset src="${lib.noinst}/jspeex.jar" prefix=""/>
<zipfileset src="${lib.noinst}/lti-civil-no_s_w_t.jar" prefix=""/>
</jar>
<!-- Creates a bundle containing the impl of the neomedia package and
the mac jmf implementation.-->
<jar
compress="false" destfile="${bundles.dest.mac}/neomedia.jar"
manifest="${src}/net/java/sip/communicator/impl/neomedia/neomedia.manifest.mf">
<zipfileset dir="${dest}/net/java/sip/communicator/service/neomedia"
prefix="net/java/sip/communicator/service/neomedia"/>
<zipfileset dir="${dest}/net/java/sip/communicator/impl/neomedia"
prefix="net/java/sip/communicator/impl/neomedia"/>
<zipfileset dir="${dest}/net/java/sip/communicator/impl/media"
includes="ArrayIOUtils*"
prefix="net/java/sip/communicator/impl/media"/>
<zipfileset dir="${dest}/net/java/sip/communicator/impl/media/codec"
includes="Constants*"
prefix="net/java/sip/communicator/impl/media/codec"/>
<zipfileset dir="${dest}/net/java/sip/communicator/impl/media/codec/audio"
prefix="net/java/sip/communicator/impl/media/codec/audio"/>
<zipfileset dir="${dest}/net/java/sip/communicator/impl/media/codec/video"
prefix="net/java/sip/communicator/impl/media/codec/video"/>
<zipfileset dir="${dest}/net/java/sip/communicator/impl/media/protocol"
prefix="net/java/sip/communicator/impl/media/protocol"/>
<zipfileset dir="${dest}/net/java/sip/communicator/impl/media/renderer"
prefix="net/java/sip/communicator/impl/media/renderer"/>
<zipfileset dir="${resources}/images/impl/media"
prefix="resources/images/impl/media"/>
<zipfileset src="${lib.mac.noinst}/jmf.jar" prefix=""/>
<zipfileset src="${lib.noinst}/fmj.jar" prefix=""/>
<zipfileset src="${lib.noinst}/jain-sdp.jar" prefix=""/>
<zipfileset src="${lib.noinst}/jspeex.jar" prefix=""/>
<zipfileset src="${lib.noinst}/lti-civil-no_s_w_t.jar" prefix=""/>
</jar>
</target>
<!--BUNDLE-MEDIA-SLICK-->
<target name="bundle-media-slick">
<!-- Creates a bundle containing the slick for the media package.-->

@ -801,6 +801,8 @@ impl.media.configform.NO_PREVIEW=Preview
impl.media.configform.UP=&Up
impl.media.configform.VIDEO=&Video:
impl.neomedia.configform.TITLE=Neomedia
# otr plugin
plugin.otr.menu.TITLE=OTR
plugin.otr.menu.START_OTR=Start private conversation

@ -0,0 +1,158 @@
/*
* SIP Communicator, the OpenSource Java VoIP and Instant Messaging client.
*
* Distributable under LGPL license.
* See terms of license at gnu.org.
*/
package net.java.sip.communicator.impl.neomedia;
import javax.media.*;
import javax.media.format.*;
import javax.media.rtp.*;
import net.java.sip.communicator.impl.neomedia.codec.*;
import net.java.sip.communicator.service.neomedia.*;
import net.java.sip.communicator.service.neomedia.device.*;
import net.java.sip.communicator.service.neomedia.event.*;
import net.java.sip.communicator.util.*;
/**
* @author Lubomir Marinov
*/
public class AudioMediaStreamImpl
extends MediaStreamImpl
implements AudioMediaStream
{
private static final Logger logger
= Logger.getLogger(AudioMediaStreamImpl.class);
/**
* List of RTP format strings which are supported by SIP Communicator in
* addition to the JMF standard formats.
*
* @see #registerCustomCodecFormats(RTPManager)
*/
private static final Format[] CUSTOM_CODEC_FORMATS
= new Format[]
{
/*
* these formats are specific, since RTP uses format numbers
* with no parameters.
*/
new AudioFormat(
Constants.ILBC_RTP,
8000.0,
16,
1,
AudioFormat.LITTLE_ENDIAN,
AudioFormat.SIGNED),
new AudioFormat(
Constants.ALAW_RTP,
8000,
8,
1,
-1,
AudioFormat.SIGNED),
new AudioFormat(Constants.SPEEX_RTP,
8000,
8,
1,
-1,
AudioFormat.SIGNED)
};
/**
* JMF stores <tt>CUSTOM_CODEC_FORMATS</tt> statically, so they only need to
* be registered once. FMJ does this dynamically (per instance), so it needs
* to be done every time we instantiate an RTP manager.
*/
private static boolean formatsRegisteredOnce = false;
public AudioMediaStreamImpl(StreamConnector connector, MediaDevice device)
{
super(connector, device);
}
/*
* Implements AudioMediaStream#addDTMFListener(DTMFListener).
*/
public void addDTMFListener(DTMFListener listener)
{
// TODO Auto-generated method stub
}
/*
* Implements AudioMediaStream#addSoundLevelListener(SoundLevelListener).
*/
public void addSoundLevelListener(SoundLevelListener listener)
{
// TODO Auto-generated method stub
}
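/**
* Registers the RTP formats which are supported by SIP Communicator in
* addition to the JMF standard formats (i.e. CUSTOM_CODEC_FORMATS) with the
* specified RTPManager. With JMF the registration only has to happen once;
* with FMJ it has to be repeated for every RTPManager instance.
*
* @param rtpManager the RTPManager to register the custom formats with
*/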
protected void registerCustomCodecFormats(RTPManager rtpManager)
{
// if we have already registered custom formats and we are running JMF
// we bail out.
if (!FMJConditionals.REGISTER_FORMATS_WITH_EVERY_RTP_MANAGER
&& formatsRegisteredOnce)
return;
for (Format format : CUSTOM_CODEC_FORMATS)
{
logger.debug("registering format " + format + " with RTP manager");
/*
* NOTE (mkoch@rowa.de): com.sun.media.rtp.RtpSessionMgr.addFormat
* leaks memory, since it stores the Format in a static Vector.
* AFAIK there is no easy way around it, but the memory impact
* should not be too bad.
*/
rtpManager
.addFormat(
format,
MediaUtils.jmfToSdpEncoding(format.getEncoding()));
}
formatsRegisteredOnce = true;
}
/*
* Implements AudioMediaStream#removeDTMFListener(DTMFListener).
*/
public void removeDTMFListener(DTMFListener listener)
{
// TODO Auto-generated method stub
}
/*
* Implements AudioMediaStream#removeSoundLevelListener(SoundLevelListener).
*/
public void removeSoundLevelListener(SoundLevelListener listener)
{
// TODO Auto-generated method stub
}
/*
* Implements AudioMediaStream#setMute(boolean).
*/
public void setMute(boolean mute)
{
// TODO Auto-generated method stub
}
/*
* Implements AudioMediaStream#startSendingDTMF(DTMFTone).
*/
public void startSendingDTMF(DTMFTone tone)
{
// TODO Auto-generated method stub
}
/*
* Implements AudioMediaStream#stopSendingDTMF().
*/
public void stopSendingDTMF()
{
// TODO Auto-generated method stub
}
}

@ -0,0 +1,292 @@
/*
* SIP Communicator, the OpenSource Java VoIP and Instant Messaging client.
*
* Distributable under LGPL license.
* See terms of license at gnu.org.
*/
package net.java.sip.communicator.impl.neomedia;
import java.util.*;
import javax.media.*;
import javax.swing.*;
import javax.swing.event.*;
import net.java.sip.communicator.impl.neomedia.device.*;
/**
* @author Lubomir Marinov
*/
public class DeviceConfigurationComboBoxModel
implements ComboBoxModel
{
/**
* Encapsulates CaptureDeviceInfo
*/
public static class CaptureDevice
{
/**
* Compares two CaptureDeviceInfo instances for equality.
* @param a
* @param b
* @return whether a is equal to b
*/
public static boolean equals(CaptureDeviceInfo a, CaptureDeviceInfo b)
{
return (a == null) ? (b == null) : a.equals(b);
}
/**
* The encapsulated info.
*/
public final CaptureDeviceInfo info;
/**
* Creates the wrapper.
* @param info the info object we wrap.
*/
public CaptureDevice(CaptureDeviceInfo info)
{
this.info = info;
}
public String toString()
{
return
(info == null)
? NeomediaActivator
.getResources()
.getI18NString("impl.media.configform.NO_DEVICE")
: info.getName();
}
}
/**
* Type of the model - audio.
*/
public static final int AUDIO = 1;
/**
* Type of the model - video.
*/
public static final int VIDEO = 2;
/**
* Audio Capture Device.
*/
public static final int AUDIO_CAPTURE = 3;
/**
* Audio playback device.
*/
public static final int AUDIO_PLAYBACK = 4;
/**
* Audio device for notification sounds.
*/
public static final int AUDIO_NOTIFY = 5;
private final DeviceConfiguration deviceConfiguration;
private CaptureDevice[] devices;
private final List<ListDataListener> listeners =
new ArrayList<ListDataListener>();
private final int type;
/**
* Creates a device combo box model.
* @param deviceConfiguration the current device configuration
* @param type the device - audio/video
*/
public DeviceConfigurationComboBoxModel(
DeviceConfiguration deviceConfiguration, int type)
{
if (deviceConfiguration == null)
throw new IllegalArgumentException("deviceConfiguration");
if ((type != AUDIO_CAPTURE) && (type != AUDIO_NOTIFY) &&
(type != AUDIO_PLAYBACK) &&
(type != AUDIO) && (type != VIDEO))
throw new IllegalArgumentException("type");
this.deviceConfiguration = deviceConfiguration;
this.type = type;
}
public void addListDataListener(ListDataListener listener)
{
if (listener == null)
throw new IllegalArgumentException("listener");
if (!listeners.contains(listener))
listeners.add(listener);
}
/**
* Notifies the registered listeners that the content in the given index range has changed.
* @param index0 from index.
* @param index1 to index.
*/
protected void fireContentsChanged(int index0, int index1)
{
ListDataListener[] listeners =
this.listeners.toArray(new ListDataListener[this.listeners.size()]);
ListDataEvent event =
new ListDataEvent(this, ListDataEvent.CONTENTS_CHANGED, index0,
index1);
for (ListDataListener listener : listeners)
{
listener.contentsChanged(event);
}
}
private DeviceConfiguration getDeviceConfiguration()
{
return deviceConfiguration;
}
private CaptureDevice[] getDevices()
{
if (devices != null)
return devices;
CaptureDeviceInfo[] infos;
switch (type)
{
case AUDIO_CAPTURE:
// supply only PortAudio devices since, in this case, we are specifying
// capture devices available only through PortAudio
infos = deviceConfiguration.getAvailableAudioCaptureDevices(
DeviceConfiguration.AUDIO_SYSTEM_PORTAUDIO);
break;
case AUDIO_NOTIFY:
case AUDIO_PLAYBACK:
infos = deviceConfiguration.getAvailableAudioPlaybackDevices();
break;
case VIDEO:
infos = deviceConfiguration.getAvailableVideoCaptureDevices();
break;
default:
throw new IllegalStateException("type");
}
final int deviceCount = infos.length;
devices = new CaptureDevice[deviceCount + 1];
for (int i = 0; i < deviceCount; i++)
{
devices[i] = new CaptureDevice(infos[i]);
}
devices[deviceCount] = new CaptureDevice(null);
return devices;
}
private CaptureDevice getSelectedDevice()
{
CaptureDeviceInfo info;
switch (type)
{
case AUDIO_CAPTURE:
info = deviceConfiguration.getAudioCaptureDevice();
break;
case AUDIO_NOTIFY:
info = deviceConfiguration.getAudioNotifyDevice();
break;
case AUDIO_PLAYBACK:
info = deviceConfiguration.getAudioPlaybackDevice();
break;
case VIDEO:
info = deviceConfiguration.getVideoCaptureDevice();
break;
default:
throw new IllegalStateException("type");
}
for (CaptureDevice device : getDevices())
{
if (CaptureDevice.equals(device.info, info))
return device;
}
return null;
}
public Object getElementAt(int index)
{
if(type == AUDIO)
return deviceConfiguration.getAvailableAudioSystems()[index];
else
return getDevices()[index];
}
public Object getSelectedItem()
{
if(type == AUDIO)
return deviceConfiguration.getAudioSystem();
else
return getSelectedDevice();
}
public int getSize()
{
if(type == AUDIO)
return deviceConfiguration.getAvailableAudioSystems().length;
else
return getDevices().length;
}
public void removeListDataListener(ListDataListener listener)
{
if (listener == null)
throw new IllegalArgumentException("listener");
listeners.remove(listener);
}
private void setSelectedDevice(CaptureDevice device)
{
// We cannot clear the selection of DeviceConfiguration.
if (device == null)
return;
CaptureDevice selectedDevice = getSelectedDevice();
if (selectedDevice != device)
{
DeviceConfiguration deviceConfiguration = getDeviceConfiguration();
switch (type)
{
case AUDIO_CAPTURE:
deviceConfiguration.setAudioCaptureDevice(device.info);
break;
case AUDIO_NOTIFY:
deviceConfiguration.setAudioNotifyDevice(device.info);
break;
case AUDIO_PLAYBACK:
deviceConfiguration.setAudioPlaybackDevice(device.info);
break;
case VIDEO:
deviceConfiguration.setVideoCaptureDevice(device.info);
break;
}
fireContentsChanged(-1, -1);
}
}
public void setSelectedItem(Object item)
{
if(type == AUDIO)
{
String systemName = (String)item;
if(!systemName.equals(deviceConfiguration.getAudioSystem()))
{
deviceConfiguration.setAudioSystem(systemName, null);
fireContentsChanged(-1, -1);
}
}
else
setSelectedDevice((CaptureDevice) item);
}
}

@ -0,0 +1,184 @@
/*
* SIP Communicator, the OpenSource Java VoIP and Instant Messaging client.
*
* Distributable under LGPL license.
* See terms of license at gnu.org.
*/
package net.java.sip.communicator.impl.neomedia;
import java.util.*;
import javax.swing.table.*;
import net.java.sip.communicator.impl.neomedia.codec.*;
/**
* @author Lubomir Marinov
*/
public class EncodingConfigurationTableModel
extends AbstractTableModel
{
public static final int AUDIO = DeviceConfigurationComboBoxModel.AUDIO;
private static final String[] NO_ENCODINGS = new String[0];
public static final int VIDEO = DeviceConfigurationComboBoxModel.VIDEO;
private final EncodingConfiguration encodingConfiguration;
private String[] encodings;
private final int type;
public EncodingConfigurationTableModel(
EncodingConfiguration encodingConfiguration, int type)
{
if (encodingConfiguration == null)
throw new IllegalArgumentException("encodingConfiguration");
if ((type != AUDIO) && (type != VIDEO))
throw new IllegalArgumentException("type");
this.encodingConfiguration = encodingConfiguration;
this.type = type;
}
public Class<?> getColumnClass(int columnIndex)
{
return (columnIndex == 0) ? Boolean.class : super
.getColumnClass(columnIndex);
}
public int getColumnCount()
{
return 2;
}
private String[] getEncodings()
{
if (encodings != null)
return encodings;
String[] availableEncodings;
switch (type)
{
case AUDIO:
availableEncodings =
encodingConfiguration.getAvailableAudioEncodings();
break;
case VIDEO:
availableEncodings =
encodingConfiguration.getAvailableVideoEncodings();
break;
default:
throw new IllegalStateException("type");
}
final int encodingCount = availableEncodings.length;
if (encodingCount < 1)
encodings = NO_ENCODINGS;
else
{
encodings = new String[encodingCount];
System
.arraycopy(availableEncodings, 0, encodings, 0, encodingCount);
Arrays.sort(encodings, 0, encodingCount, new Comparator<String>()
{
public int compare(String encoding0, String encoding1)
{
return encodingConfiguration.getPriority(encoding1) -
encodingConfiguration.getPriority(encoding0);
}
});
}
return encodings;
}
private int[] getPriorities()
{
String[] encodings = getEncodings();
final int count = encodings.length;
int[] priorities = new int[count];
for (int i = 0; i < count; i++)
{
int priority = encodingConfiguration.getPriority(encodings[i]);
priorities[i] = (priority > 0) ? (count - i) : 0;
}
return priorities;
}
public int getRowCount()
{
return getEncodings().length;
}
public Object getValueAt(int rowIndex, int columnIndex)
{
String encoding = getEncodings()[rowIndex];
switch (columnIndex)
{
case 0:
return (encodingConfiguration.getPriority(encoding) > 0);
case 1:
return MediaUtils.sdpToJmfEncoding(encoding);
default:
return null;
}
}
public boolean isCellEditable(int rowIndex, int columnIndex)
{
return (columnIndex == 0);
}
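/**
* Moves the encoding at <tt>rowIndex</tt> one row up or down by swapping it
* with its neighbour and recomputing the priorities of the two (enabled)
* encodings to match their new positions; moving up is implemented as moving
* the preceding row down.
*
* @param rowIndex the row of the encoding to move
* @param up <tt>true</tt> to move the encoding towards the top of the table
* @return the new row index of the moved encoding
*/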
public int move(int rowIndex, boolean up)
{
if (up)
{
if (rowIndex <= 0)
throw new IllegalArgumentException("rowIndex");
return move(rowIndex - 1, false) - 1;
}
if (rowIndex >= (getRowCount() - 1))
throw new IllegalArgumentException("rowIndex");
int[] priorities = getPriorities();
final int nextRowIndex = rowIndex + 1;
if (priorities[rowIndex] > 0)
priorities[rowIndex] = priorities.length - nextRowIndex;
if (priorities[nextRowIndex] > 0)
priorities[nextRowIndex] = priorities.length - rowIndex;
setPriorities(priorities);
String swap = encodings[rowIndex];
encodings[rowIndex] = encodings[nextRowIndex];
encodings[nextRowIndex] = swap;
fireTableRowsUpdated(rowIndex, nextRowIndex);
return nextRowIndex;
}
private void setPriorities(int[] priorities)
{
final int count = encodings.length;
if (priorities.length != count)
throw new IllegalArgumentException("priorities");
for (int i = 0; i < count; i++)
encodingConfiguration.setPriority(encodings[i], priorities[i]);
}
public void setValueAt(Object value, int rowIndex, int columnIndex)
{
if ((columnIndex == 0) && (value instanceof Boolean))
{
int[] priorities = getPriorities();
priorities[rowIndex] =
((Boolean) value) ? (priorities.length - rowIndex) : 0;
setPriorities(priorities);
fireTableCellUpdated(rowIndex, columnIndex);
}
}
}

@ -0,0 +1,58 @@
/*
* SIP Communicator, the OpenSource Java VoIP and Instant Messaging client.
*
* Distributable under LGPL license.
* See terms of license at gnu.org.
*/
package net.java.sip.communicator.impl.neomedia;
import javax.media.format.*;
/**
* Class to centralize workarounds or changes that need to be made for FMJ to work.
* This is also a place to tweak which workarounds are used.
*
* @author Ken Larson
*/
public class FMJConditionals
{
public static final boolean IS_FMJ = false;
/**
* Some SC codecs depend on internal Sun/IBM JMF classes.
*/
public static final boolean FMJ_CODECS = IS_FMJ;
/**
* FMJ's filter graph builder does not give specific formats for the tracks,
* but rather a general format with no encoding.
*/
public static final AudioFormat FORCE_AUDIO_FORMAT = !IS_FMJ
? null
: new AudioFormat(AudioFormat.ULAW_RTP, 8000, 8, 1);
// to force ALAW/rtp, change the format to the following
// new AudioFormat(
// "ALAW/rtp",
// 8000,
// 8,
// 1,
// -1,
// AudioFormat.SIGNED);
/**
* JMF stores CUSTOM_CODEC_FORMATS statically, so they only need to be
* registered once. FMJ does this dynamically (per instance), so it needs
* to be done every time we instantiate an RTP manager. This variable
* determines whether we should register them every time (FMJ) or not (JMF).
*/
public static final boolean REGISTER_FORMATS_WITH_EVERY_RTP_MANAGER
= IS_FMJ;
/**
* Whether to use JMF's internal registry to avoid auto-detecting capture
* devices each time, by tagging it with our own "author" property.
*/
public static final boolean USE_JMF_INTERNAL_REGISTRY = !IS_FMJ;
}

@ -0,0 +1,631 @@
/*
* SIP Communicator, the OpenSource Java VoIP and Instant Messaging client.
*
* Distributable under LGPL license. See terms of license at gnu.org.
*/
package net.java.sip.communicator.impl.neomedia;
import java.awt.*;
import java.awt.event.*;
import java.io.*;
import javax.media.*;
import javax.media.MediaException;
import javax.media.protocol.*;
import javax.swing.*;
import javax.swing.event.*;
import javax.swing.table.*;
import net.java.sip.communicator.impl.neomedia.device.*;
import net.java.sip.communicator.service.neomedia.*;
import net.java.sip.communicator.service.resources.*;
import net.java.sip.communicator.util.*;
import net.java.sip.communicator.util.swing.*;
import org.osgi.framework.*;
/**
* @author Lubomir Marinov
* @author Damian Minkov
*/
public class MediaConfigurationPanel
extends TransparentPanel
{
private static final int HGAP = 5;
private static final int VGAP = 5;
private static MediaServiceImpl getMediaService()
{
BundleContext bundleContext = NeomediaActivator.getBundleContext();
ServiceReference serviceReference
= bundleContext.getServiceReference(MediaService.class.getName());
return
(serviceReference == null)
? null
: (MediaServiceImpl) bundleContext.getService(serviceReference);
}
private final Logger logger
= Logger.getLogger(MediaConfigurationPanel.class);
private final MediaServiceImpl mediaService = getMediaService();
/**
* The video <code>CaptureDeviceInfo</code> this instance started to create
* the preview of.
* <p>
* Because the creation of the preview is asynchronous, it's possible to
* request the preview of one and the same device multiple times. Which may
* lead to failures because of, for example, busy devices and/or resources
* (as is the case with LTI-CIVIL and video4linux2).
* </p>
*/
private CaptureDeviceInfo videoDeviceInPreview;
/**
* The <code>Player</code> depicting the preview of the currently selected
* <code>CaptureDeviceInfo</code>.
*/
private Player videoPlayerInPreview;
/**
* Creates the panel.
*/
public MediaConfigurationPanel()
{
super(new GridLayout(0, 1, HGAP, VGAP));
int[] types
= new int[]
{
DeviceConfigurationComboBoxModel.AUDIO,
DeviceConfigurationComboBoxModel.VIDEO
};
for (int type : types)
add(createControls(type));
}
private void controllerUpdateForPreview(ControllerEvent event,
Container videoContainer)
{
if (event instanceof RealizeCompleteEvent)
{
Player player = (Player) event.getSourceController();
Component video = player.getVisualComponent();
showPreview(videoContainer, video, player);
}
}
private void createPortAudioControls(Container portAudioPanel)
{
portAudioPanel.add(new JLabel(getLabelText(
DeviceConfigurationComboBoxModel.AUDIO_CAPTURE)));
JComboBox captureCombo = new JComboBox();
captureCombo.setEditable(false);
captureCombo.setModel(
new DeviceConfigurationComboBoxModel(
mediaService.getDeviceConfiguration(),
DeviceConfigurationComboBoxModel.AUDIO_CAPTURE));
portAudioPanel.add(captureCombo);
portAudioPanel.add(new JLabel(getLabelText(
DeviceConfigurationComboBoxModel.AUDIO_PLAYBACK)));
JComboBox playbackCombo = new JComboBox();
playbackCombo.setEditable(false);
playbackCombo.setModel(
new DeviceConfigurationComboBoxModel(
mediaService.getDeviceConfiguration(),
DeviceConfigurationComboBoxModel.AUDIO_PLAYBACK));
portAudioPanel.add(playbackCombo);
portAudioPanel.add(new JLabel(getLabelText(
DeviceConfigurationComboBoxModel.AUDIO_NOTIFY)));
JComboBox notifyCombo = new JComboBox();
notifyCombo.setEditable(false);
notifyCombo.setModel(
new DeviceConfigurationComboBoxModel(
mediaService.getDeviceConfiguration(),
DeviceConfigurationComboBoxModel.AUDIO_NOTIFY));
portAudioPanel.add(notifyCombo);
}
private Component createControls(int type)
{
final JComboBox comboBox = new JComboBox();
comboBox.setEditable(false);
comboBox
.setModel(
new DeviceConfigurationComboBoxModel(
mediaService.getDeviceConfiguration(),
type));
/*
* We provide additional configuration properties for PortAudio such as
* input audio device, output audio device and audio device for playback
* of notifications.
*/
final Container portAudioPanel;
if (type == DeviceConfigurationComboBoxModel.AUDIO)
{
portAudioPanel
= new TransparentPanel(new GridLayout(3, 2, HGAP, VGAP));
comboBox.addItemListener(new ItemListener()
{
public void itemStateChanged(ItemEvent e)
{
if(e.getStateChange() == ItemEvent.SELECTED)
{
if(DeviceConfiguration
.AUDIO_SYSTEM_PORTAUDIO.equals(e.getItem()))
{
createPortAudioControls(portAudioPanel);
}
else
{
portAudioPanel.removeAll();
revalidate();
repaint();
}
}
}
});
if (DeviceConfiguration
.AUDIO_SYSTEM_PORTAUDIO.equals(comboBox.getSelectedItem()))
createPortAudioControls(portAudioPanel);
}
else
portAudioPanel = null;
JLabel label = new JLabel(getLabelText(type));
label.setDisplayedMnemonic(getDisplayedMnemonic(type));
label.setLabelFor(comboBox);
Container firstContainer = new TransparentPanel(new GridBagLayout());
GridBagConstraints firstConstraints = new GridBagConstraints();
firstConstraints.anchor = GridBagConstraints.NORTHWEST;
firstConstraints.gridx = 0;
firstConstraints.gridy = 0;
firstConstraints.weightx = 0;
firstContainer.add(label, firstConstraints);
firstConstraints.gridx = 1;
firstConstraints.weightx = 1;
firstContainer.add(comboBox, firstConstraints);
if (portAudioPanel != null)
{
firstConstraints.gridx = 0;
firstConstraints.gridy = 1;
firstConstraints.weightx = 1;
firstConstraints.gridwidth = 2;
firstConstraints.insets = new Insets(VGAP, 0, 0, 0);
firstContainer.add(portAudioPanel, firstConstraints);
}
Container secondContainer =
new TransparentPanel(new GridLayout(1, 0, HGAP, VGAP));
secondContainer.add(createPreview(type, comboBox));
secondContainer.add(createEncodingControls(type));
Container container = new TransparentPanel(new GridBagLayout());
GridBagConstraints constraints = new GridBagConstraints();
constraints.fill = GridBagConstraints.HORIZONTAL;
constraints.gridx = 0;
constraints.gridy = 0;
constraints.weightx = 1;
constraints.weighty = 0;
container.add(firstContainer, constraints);
constraints.fill = GridBagConstraints.BOTH;
constraints.gridy = 1;
constraints.weighty = 1;
container.add(secondContainer, constraints);
return container;
}
private Component createEncodingControls(int type)
{
ResourceManagementService resources = NeomediaActivator.getResources();
String key;
final JTable table = new JTable();
table.setShowGrid(false);
table.setTableHeader(null);
key = "impl.media.configform.ENCODINGS";
JLabel label = new JLabel(resources.getI18NString(key));
label.setDisplayedMnemonic(resources.getI18nMnemonic(key));
label.setLabelFor(table);
key = "impl.media.configform.UP";
final JButton upButton = new JButton(resources.getI18NString(key));
upButton.setMnemonic(resources.getI18nMnemonic(key));
upButton.setOpaque(false);
key = "impl.media.configform.DOWN";
final JButton downButton = new JButton(resources.getI18NString(key));
downButton.setMnemonic(resources.getI18nMnemonic(key));
downButton.setOpaque(false);
Container buttonBar = new TransparentPanel(new GridLayout(0, 1));
buttonBar.add(upButton);
buttonBar.add(downButton);
Container container = new TransparentPanel(new GridBagLayout());
GridBagConstraints constraints = new GridBagConstraints();
constraints.anchor = GridBagConstraints.NORTHWEST;
constraints.fill = GridBagConstraints.HORIZONTAL;
constraints.gridwidth = 2;
constraints.gridx = 0;
constraints.gridy = 0;
constraints.weightx = 0;
constraints.weighty = 0;
container.add(label, constraints);
constraints.anchor = GridBagConstraints.CENTER;
constraints.fill = GridBagConstraints.BOTH;
constraints.gridwidth = 1;
constraints.gridx = 0;
constraints.gridy = 1;
constraints.weightx = 1;
constraints.weighty = 1;
container.add(new JScrollPane(table), constraints);
constraints.anchor = GridBagConstraints.NORTHEAST;
constraints.fill = GridBagConstraints.NONE;
constraints.gridwidth = 1;
constraints.gridx = 1;
constraints.gridy = 1;
constraints.weightx = 0;
constraints.weighty = 0;
container.add(buttonBar, constraints);
table.setModel(new EncodingConfigurationTableModel(mediaService
.getEncodingConfiguration(), type));
/*
* The first column contains the check boxes which enable/disable their
* associated encodings and it doesn't make sense to make it wider than
* the check boxes.
*/
TableColumnModel tableColumnModel = table.getColumnModel();
TableColumn tableColumn = tableColumnModel.getColumn(0);
tableColumn.setMaxWidth(tableColumn.getMinWidth());
ListSelectionListener tableSelectionListener =
new ListSelectionListener()
{
public void valueChanged(ListSelectionEvent event)
{
if (table.getSelectedRowCount() == 1)
{
int selectedRow = table.getSelectedRow();
if (selectedRow > -1)
{
upButton.setEnabled(selectedRow > 0);
downButton.setEnabled(selectedRow < (table
.getRowCount() - 1));
return;
}
}
upButton.setEnabled(false);
downButton.setEnabled(false);
}
};
table.getSelectionModel().addListSelectionListener(
tableSelectionListener);
tableSelectionListener.valueChanged(null);
ActionListener buttonListener = new ActionListener()
{
public void actionPerformed(ActionEvent event)
{
Object source = event.getSource();
boolean up;
if (source == upButton)
up = true;
else if (source == downButton)
up = false;
else
return;
move(table, up);
}
};
upButton.addActionListener(buttonListener);
downButton.addActionListener(buttonListener);
return container;
}
private void createPreview(CaptureDeviceInfo device,
final Container videoContainer)
throws IOException,
MediaException
{
videoContainer.removeAll();
if (videoPlayerInPreview != null)
disposePlayer(videoPlayerInPreview);
if (device == null)
return;
DataSource dataSource = Manager.createDataSource(device.getLocator());
Dimension size = videoContainer.getPreferredSize();
VideoMediaStreamImpl
.selectVideoSize(dataSource, size.width, size.height);
Player player = Manager.createPlayer(dataSource);
videoPlayerInPreview = player;
player.addControllerListener(new ControllerListener()
{
public void controllerUpdate(ControllerEvent event)
{
controllerUpdateForPreview(event, videoContainer);
}
});
player.start();
}
private Component createPreview(int type, final JComboBox comboBox)
{
final Container preview;
if (type == DeviceConfigurationComboBoxModel.VIDEO)
{
JLabel noPreview
= new JLabel(
NeomediaActivator
.getResources()
.getI18NString(
"impl.media.configform.NO_PREVIEW"));
noPreview.setHorizontalAlignment(SwingConstants.CENTER);
noPreview.setVerticalAlignment(SwingConstants.CENTER);
preview = createVideoContainer(noPreview);
final ActionListener comboBoxListener = new ActionListener()
{
public void actionPerformed(ActionEvent event)
{
Object selection = comboBox.getSelectedItem();
CaptureDeviceInfo device = null;
if (selection
instanceof
DeviceConfigurationComboBoxModel.CaptureDevice)
device
= ((DeviceConfigurationComboBoxModel.CaptureDevice)
selection)
.info;
if ((device != null) && device.equals(videoDeviceInPreview))
return;
Exception exception;
try
{
createPreview(device, preview);
exception = null;
}
catch (IOException ex)
{
exception = ex;
}
catch (MediaException ex)
{
exception = ex;
}
if (exception != null)
{
logger.error(
"Failed to create preview for device " + device,
exception);
device = null;
}
videoDeviceInPreview = device;
}
};
comboBox.addActionListener(comboBoxListener);
/*
* We have to initialize the controls to reflect the configuration
* at the time of creating this instance. Additionally, because the
* video preview will stop when it and its associated controls
* become unnecessary, we have to restart it when the mentioned
* controls become necessary again. We'll address the two goals
* described by pretending there's a selection in the video combo
* box when the combo box in question becomes displayable.
*/
comboBox.addHierarchyListener(new HierarchyListener()
{
public void hierarchyChanged(HierarchyEvent event)
{
if (((event.getChangeFlags()
& HierarchyEvent.DISPLAYABILITY_CHANGED)
!= 0)
&& comboBox.isDisplayable())
comboBoxListener.actionPerformed(null);
}
});
} else
preview = new TransparentPanel();
return preview;
}
private Container createVideoContainer(Component noVideoComponent)
{
return new VideoContainer(noVideoComponent);
}
private void disposePlayer(Player player)
{
player.stop();
player.deallocate();
player.close();
if ((videoPlayerInPreview != null)
&& videoPlayerInPreview.equals(player))
videoPlayerInPreview = null;
}
private char getDisplayedMnemonic(int type)
{
switch (type)
{
case DeviceConfigurationComboBoxModel.AUDIO:
return
NeomediaActivator
.getResources()
.getI18nMnemonic("impl.media.configform.AUDIO");
case DeviceConfigurationComboBoxModel.VIDEO:
return
NeomediaActivator
.getResources()
.getI18nMnemonic("impl.media.configform.VIDEO");
default:
throw new IllegalArgumentException("type");
}
}
private String getLabelText(int type)
{
switch (type)
{
case DeviceConfigurationComboBoxModel.AUDIO:
return
NeomediaActivator
.getResources()
.getI18NString("impl.media.configform.AUDIO");
case DeviceConfigurationComboBoxModel.AUDIO_CAPTURE:
return
NeomediaActivator
.getResources()
.getI18NString("impl.media.configform.AUDIO_IN");
case DeviceConfigurationComboBoxModel.AUDIO_NOTIFY:
return
NeomediaActivator
.getResources()
.getI18NString("impl.media.configform.AUDIO_NOTIFY");
case DeviceConfigurationComboBoxModel.AUDIO_PLAYBACK:
return
NeomediaActivator
.getResources()
.getI18NString("impl.media.configform.AUDIO_OUT");
case DeviceConfigurationComboBoxModel.VIDEO:
return
NeomediaActivator
.getResources()
.getI18NString("impl.media.configform.VIDEO");
default:
throw new IllegalArgumentException("type");
}
}
private void move(JTable table, boolean up)
{
int index =
((EncodingConfigurationTableModel) table.getModel()).move(table
.getSelectedRow(), up);
table.getSelectionModel().setSelectionInterval(index, index);
}
private void showPreview(final Container previewContainer,
final Component preview, final Player player)
{
if (!SwingUtilities.isEventDispatchThread())
{
SwingUtilities.invokeLater(new Runnable()
{
public void run()
{
showPreview(previewContainer, preview, player);
}
});
return;
}
previewContainer.removeAll();
if (preview != null)
{
HierarchyListener hierarchyListener = new HierarchyListener()
{
private Window window;
private WindowListener windowListener;
public void dispose()
{
if (windowListener != null)
{
if (window != null)
{
window.removeWindowListener(windowListener);
window = null;
}
windowListener = null;
}
preview.removeHierarchyListener(this);
disposePlayer(player);
videoDeviceInPreview = null;
/*
* We've just disposed the player which created the preview
* component so the preview component is of no use
* regardless of whether the Media configuration form will
* be redisplayed or not. And since the preview component
* appears to be a huge object even after its player is
* disposed, make sure to not reference it.
*/
previewContainer.remove(preview);
}
public void hierarchyChanged(HierarchyEvent event)
{
if ((event.getChangeFlags()
& HierarchyEvent.DISPLAYABILITY_CHANGED)
!= 0)
{
if (preview.isDisplayable())
{
if (windowListener == null)
{
window =
SwingUtilities.windowForComponent(preview);
if (window != null)
{
windowListener = new WindowAdapter()
{
public void windowClosing(
WindowEvent event)
{
dispose();
}
};
window.addWindowListener(windowListener);
}
}
}
else
{
dispose();
}
}
}
};
preview.addHierarchyListener(hierarchyListener);
previewContainer.add(preview);
}
else
disposePlayer(player);
}
}

@ -10,6 +10,7 @@
import javax.media.*;
import net.java.sip.communicator.impl.neomedia.codec.*;
import net.java.sip.communicator.impl.neomedia.device.*;
import net.java.sip.communicator.service.neomedia.*;
import net.java.sip.communicator.service.neomedia.device.*;
@ -21,6 +22,12 @@ public class MediaServiceImpl
implements MediaService
{
/**
* The name of the property with which video support can be disabled (video is enabled by default).
*/
public static final String DISABLE_VIDEO_SUPPORT_PROPERTY_NAME
= "net.java.sip.communicator.service.media.DISABLE_VIDEO_SUPPORT";
/**
* The value of the <tt>devices</tt> property of <tt>MediaServiceImpl</tt>
* when no <tt>MediaDevice</tt>s are available. Explicitly defined in order
@ -29,32 +36,11 @@ public class MediaServiceImpl
private static final List<MediaDevice> EMPTY_DEVICES
= Collections.emptyList();
/**
* The <tt>net.java.sip.communicator.impl.media.MediaServiceImpl</tt> this
* instance delegates to for functionality it already supports in order to
* keep this instance as compatible with it as possible.
*/
private final net.java.sip.communicator.impl.media.MediaServiceImpl mediaServiceImpl;
/**
* Initializes a new <tt>MediaServiceImpl</tt> instance which is to delegate
* to a specific
* <tt>net.java.sip.communicator.impl.media.MediaServiceImpl</tt> for
* functionality it already supports in order to keep the new instance as
* compatible with the specified <tt>mediaServiceImpl</tt> as possible.
*
* @param mediaServiceImpl the
* <tt>net.java.sip.communicator.impl.media.MediaServiceImpl</tt> the new
* instance is to delegate to
*/
public MediaServiceImpl(
net.java.sip.communicator.impl.media.MediaServiceImpl mediaServiceImpl)
{
if (mediaServiceImpl == null)
throw new NullPointerException("mediaServiceImpl");
private final DeviceConfiguration deviceConfiguration
= new DeviceConfiguration();
this.mediaServiceImpl = mediaServiceImpl;
}
private final EncodingConfiguration encodingConfiguration
= new EncodingConfiguration();
/*
* Implements MediaService#createMediaStream(StreamConnector, MediaDevice).
@ -63,8 +49,15 @@ public MediaStream createMediaStream(
StreamConnector connector,
MediaDevice device)
{
// TODO Auto-generated method stub
return null;
switch (device.getMediaType())
{
case AUDIO:
return new AudioMediaStreamImpl(connector, device);
case VIDEO:
return new VideoMediaStreamImpl(connector, device);
default:
return null;
}
}
/*
@ -78,13 +71,11 @@ public MediaDevice getDefaultDevice(MediaType mediaType)
{
case AUDIO:
captureDeviceInfo
= mediaServiceImpl
.getDeviceConfiguration().getAudioCaptureDevice();
= getDeviceConfiguration().getAudioCaptureDevice();
break;
case VIDEO:
captureDeviceInfo
= mediaServiceImpl
.getDeviceConfiguration().getVideoCaptureDevice();
= getDeviceConfiguration().getVideoCaptureDevice();
break;
default:
captureDeviceInfo = null;
@ -97,6 +88,11 @@ public MediaDevice getDefaultDevice(MediaType mediaType)
: new CaptureMediaDevice(captureDeviceInfo, mediaType);
}
DeviceConfiguration getDeviceConfiguration()
{
return deviceConfiguration;
}
/*
* Implements MediaService#getDevices(MediaType).
*/
@ -108,13 +104,11 @@ public List<MediaDevice> getDevices(MediaType mediaType)
{
case AUDIO:
captureDeviceInfos
= mediaServiceImpl
.getDeviceConfiguration().getAvailableAudioCaptureDevices();
= getDeviceConfiguration().getAvailableAudioCaptureDevices();
break;
case VIDEO:
captureDeviceInfos
= mediaServiceImpl
.getDeviceConfiguration().getAvailableVideoCaptureDevices();
= getDeviceConfiguration().getAvailableVideoCaptureDevices();
break;
default:
captureDeviceInfos = null;
@ -135,4 +129,20 @@ public List<MediaDevice> getDevices(MediaType mediaType)
}
return captureDevices;
}
EncodingConfiguration getEncodingConfiguration()
{
return encodingConfiguration;
}
void start()
{
deviceConfiguration.initialize();
encodingConfiguration.initializeFormatPreferences();
encodingConfiguration.registerCustomPackages();
}
void stop()
{
}
}

@ -0,0 +1,626 @@
/*
* SIP Communicator, the OpenSource Java VoIP and Instant Messaging client.
*
* Distributable under LGPL license.
* See terms of license at gnu.org.
*/
package net.java.sip.communicator.impl.neomedia;
import java.io.*;
import java.net.*;
import javax.media.*;
import javax.media.control.*;
import javax.media.format.*;
import javax.media.protocol.*;
import javax.media.rtp.*;
import javax.media.rtp.event.*;
import net.java.sip.communicator.impl.neomedia.device.*;
import net.java.sip.communicator.service.neomedia.*;
import net.java.sip.communicator.service.neomedia.device.*;
import net.java.sip.communicator.service.neomedia.format.*;
import net.java.sip.communicator.util.*;
/**
* @author Lubomir Marinov
*/
public class MediaStreamImpl
extends AbstractMediaStream
implements ControllerListener,
ReceiveStreamListener,
SendStreamListener,
SessionListener
{
private static final Logger logger
= Logger.getLogger(MediaStreamImpl.class);
/**
* The name of the property indicating the length of our receive buffer.
*/
private static final String PROPERTY_NAME_RECEIVE_BUFFER_LENGTH
= "net.java.sip.communicator.impl.media.RECEIVE_BUFFER_LENGTH";
/**
* The <tt>MediaDevice</tt> this instance uses for both capture and playback
* of media.
*/
private CaptureMediaDevice device;
/**
* The <tt>RTPConnector</tt> through which this instance sends and receives
* RTP and RTCP traffic.
*/
private final RTPConnectorImpl rtpConnector;
private RTPManager rtpManager;
private MediaDirection startedDirection;
/**
* Initializes a new <tt>MediaStreamImpl</tt> instance which will use the
* specified <tt>MediaDevice</tt> for both capture and playback of media
* exchanged via the specified <tt>StreamConnector</tt>.
*
* @param connector the <tt>StreamConnector</tt> the new instance is to use
* for sending and receiving media
* @param device the <tt>MediaDevice</tt> the new instance is to use for
* both capture and playback of media exchanged via the specified
* <tt>StreamConnector</tt>
*/
public MediaStreamImpl(StreamConnector connector, MediaDevice device)
{
/*
* XXX Set the device early in order to make sure that it's of the right
* type because we do not yet support just any MediaDevice.
*/
setDevice(device);
this.rtpConnector = new RTPConnectorImpl(connector);
}
/*
* Implements MediaStream#close().
*/
public void close()
{
stop();
rtpConnector.removeTargets();
if (rtpManager != null)
{
rtpManager.removeReceiveStreamListener(this);
rtpManager.removeSendStreamListener(this);
rtpManager.removeSessionListener(this);
rtpManager.dispose();
rtpManager = null;
}
getDevice().close();
}
/*
* Implements ControllerListener#controllerUpdate(ControllerEvent).
*/
public void controllerUpdate(ControllerEvent event)
{
if (event instanceof RealizeCompleteEvent)
{
Player player = (Player) event.getSourceController();
if (player != null)
player.start();
}
}
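/**
* Creates a send stream in the RTPManager of this instance for each stream
* of the DataSource of this instance's MediaDevice and logs an error for
* every stream for which the creation fails.
*/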
private void createSendStreams()
{
RTPManager rtpManager = getRTPManager();
DataSource dataSource = getDevice().getDataSource();
int streamCount;
if (dataSource instanceof PushBufferDataSource)
{
PushBufferStream[] streams
= ((PushBufferDataSource) dataSource).getStreams();
streamCount = (streams == null) ? 0 : streams.length;
}
else if (dataSource instanceof PushDataSource)
{
PushSourceStream[] streams
= ((PushDataSource) dataSource).getStreams();
streamCount = (streams == null) ? 0 : streams.length;
}
else if (dataSource instanceof PullBufferDataSource)
{
PullBufferStream[] streams
= ((PullBufferDataSource) dataSource).getStreams();
streamCount = (streams == null) ? 0 : streams.length;
}
else if (dataSource instanceof PullDataSource)
{
PullSourceStream[] streams
= ((PullDataSource) dataSource).getStreams();
streamCount = (streams == null) ? 0 : streams.length;
}
else
streamCount = 1;
for (int streamIndex = 0; streamIndex < streamCount; streamIndex++)
{
Throwable exception = null;
try
{
rtpManager.createSendStream(dataSource, streamIndex);
}
catch (IOException ioe)
{
exception = ioe;
}
catch (UnsupportedFormatException ufe)
{
exception = ufe;
}
if (exception != null)
{
// TODO
logger
.error(
"Failed to create send stream for data source "
+ dataSource
+ " and stream index "
+ streamIndex,
exception);
}
}
}
/*
* Implements MediaStream#getDevice().
*/
public CaptureMediaDevice getDevice()
{
return device;
}
/*
* Implements MediaStream#getFormat().
*/
public MediaFormat getFormat()
{
return getDevice().getFormat();
}
/*
* Implements MediaStream#getLocalSourceID().
*/
public String getLocalSourceID()
{
// TODO Auto-generated method stub
return null;
}
/*
* Implements MediaStream#getRemoteControlAddress().
*/
public InetSocketAddress getRemoteControlAddress()
{
return
(InetSocketAddress)
rtpConnector.getControlSocket().getRemoteSocketAddress();
}
/*
* Implements MediaStream#getRemoteDataAddress().
*/
public InetSocketAddress getRemoteDataAddress()
{
return
(InetSocketAddress)
rtpConnector.getDataSocket().getRemoteSocketAddress();
}
/*
* Implements MediaStream#getRemoteSourceID().
*/
public String getRemoteSourceID()
{
// TODO Auto-generated method stub
return null;
}
private RTPManager getRTPManager()
{
if (rtpManager == null)
{
rtpManager = RTPManager.newInstance();
registerCustomCodecFormats(rtpManager);
rtpManager.addReceiveStreamListener(this);
rtpManager.addSendStreamListener(this);
rtpManager.addSessionListener(this);
/*
* It appears that if we don't do this, managers don't play. You can
* try out some other buffer size to see if you can get better
* smoothness.
*/
BufferControl bc
= (BufferControl)
rtpManager.getControl(BufferControl.class.getName());
if (bc != null)
{
String buffStr
= NeomediaActivator
.getConfigurationService()
.getString(PROPERTY_NAME_RECEIVE_BUFFER_LENGTH);
long buff = 100;
try
{
if ((buffStr != null) && (buffStr.length() > 0))
buff = Long.parseLong(buffStr);
}
catch (NumberFormatException nfe)
{
logger
.warn(
buffStr
+ " is not a valid receive buffer/long value",
nfe);
}
buff = bc.setBufferLength(buff);
logger.trace("set receiver buffer len to " + buff);
bc.setEnabledThreshold(true);
bc.setMinimumThreshold(100);
}
rtpManager.initialize(rtpConnector);
createSendStreams();
}
return rtpManager;
}
protected void registerCustomCodecFormats(RTPManager rtpManager)
{
}
/*
* Implements MediaStream#setDevice(MediaDevice).
*/
public void setDevice(MediaDevice device)
{
this.device = (CaptureMediaDevice) device;
}
/*
* Implements MediaStream#setFormat(MediaFormat).
*/
public void setFormat(MediaFormat format)
{
getDevice().setFormat(format);
}
public void setTarget(MediaStreamTarget target)
{
rtpConnector.removeTargets();
if (target != null)
{
InetSocketAddress dataAddr = target.getDataAddress();
InetSocketAddress controlAddr = target.getControlAddress();
try
{
rtpConnector
.addTarget(
new SessionAddress(
dataAddr.getAddress(),
dataAddr.getPort(),
controlAddr.getAddress(),
controlAddr.getPort()));
}
catch (IOException ioe)
{
// TODO
logger.error("Failed to add target " + target, ioe);
}
}
}
/*
* Implements MediaStream#start().
*/
public void start()
{
start(MediaDirection.SENDRECV);
}
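/**
* Starts capture, sending and/or playback of media in the specified
* direction if it has not been started already and records the direction in
* which this stream is now started.
*
* @param direction the direction (send and/or receive) in which to start
* this stream
*/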
public void start(MediaDirection direction)
{
if (direction == null)
throw new IllegalArgumentException("direction");
if ((MediaDirection.SENDRECV.equals(direction)
|| MediaDirection.SENDONLY.equals(direction))
&& (!MediaDirection.SENDRECV.equals(startedDirection)
&& !MediaDirection.SENDONLY.equals(startedDirection)))
{
RTPManager rtpManager = getRTPManager();
@SuppressWarnings("unchecked")
Iterable<SendStream> sendStreams = rtpManager.getSendStreams();
if (sendStreams != null)
for (SendStream sendStream : sendStreams)
try
{
// TODO Are we sure we want to connect here?
sendStream.getDataSource().connect();
sendStream.start();
}
catch (IOException ioe)
{
logger
.warn("Failed to start stream " + sendStream, ioe);
}
getDevice().start(MediaDirection.SENDONLY);
if (MediaDirection.RECVONLY.equals(startedDirection))
startedDirection = MediaDirection.SENDRECV;
else if (startedDirection == null)
startedDirection = MediaDirection.SENDONLY;
}
if ((MediaDirection.SENDRECV.equals(direction)
|| MediaDirection.RECVONLY.equals(direction))
&& (!MediaDirection.SENDRECV.equals(startedDirection)
&& !MediaDirection.RECVONLY.equals(startedDirection)))
{
RTPManager rtpManager = getRTPManager();
Iterable<ReceiveStream> receiveStreams;
try
{
receiveStreams = rtpManager.getReceiveStreams();
}
catch (Exception e)
{
/*
* it appears that in early call states, when there are no
* streams, this method could throw a NullPointerException.
* Make sure we handle it gracefully.
*/
logger.trace("Failed to retrieve receive streams", e);
receiveStreams = null;
}
if (receiveStreams != null)
for (ReceiveStream receiveStream : receiveStreams)
try
{
DataSource receiveStreamDataSource
= receiveStream.getDataSource();
/*
* For an unknown reason, the stream DataSource can be
* null at the end of the Call after re-INVITEs have
* been handled.
*/
if (receiveStreamDataSource != null)
receiveStreamDataSource.start();
}
catch (IOException ioe)
{
logger
.warn(
"Failed to start stream " + receiveStream,
ioe);
}
getDevice().start(MediaDirection.RECVONLY);
if (MediaDirection.SENDONLY.equals(startedDirection))
startedDirection = MediaDirection.SENDRECV;
else if (startedDirection == null)
startedDirection = MediaDirection.RECVONLY;
}
}
/*
* Implements MediaStream#stop().
*/
public void stop()
{
stop(MediaDirection.SENDRECV);
}
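/**
* Stops the sending and/or the receiving of media in the specified direction
* if it is currently started and updates the started direction accordingly.
*
* @param direction the direction (send and/or receive) in which to stop
* this stream
*/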
public void stop(MediaDirection direction)
{
if (direction == null)
throw new IllegalArgumentException("direction");
if (rtpManager == null)
return;
if ((MediaDirection.SENDRECV.equals(direction)
|| MediaDirection.SENDONLY.equals(direction))
&& (MediaDirection.SENDRECV.equals(startedDirection)
|| MediaDirection.SENDONLY.equals(startedDirection)))
{
@SuppressWarnings("unchecked")
Iterable<SendStream> sendStreams = rtpManager.getSendStreams();
if (sendStreams != null)
for (SendStream sendStream : sendStreams)
try
{
sendStream.getDataSource().stop();
sendStream.stop();
try
{
sendStream.close();
}
catch (NullPointerException npe)
{
/*
* Sometimes com.sun.media.rtp.RTCPTransmitter#bye()
* may throw NullPointerException but it does not
* seem to be guaranteed because it does not happen
* while debugging and stopping at a breakpoint on
* SendStream#close(). One of the cases in which it
* appears upon call hang-up is if we do not close
* the "old" SendStreams upon reinvite(s). Though we
* are now closing such SendStreams, ignore the
* exception here just in case because we already
* ignore IOExceptions.
*/
logger
.error(
"Failed to close stream " + sendStream,
npe);
}
}
catch (IOException ioe)
{
logger.warn("Failed to stop stream " + sendStream, ioe);
}
getDevice().stop(MediaDirection.SENDONLY);
if (MediaDirection.SENDRECV.equals(startedDirection))
startedDirection = MediaDirection.RECVONLY;
else if (MediaDirection.SENDONLY.equals(startedDirection))
startedDirection = null;
}
if ((MediaDirection.SENDRECV.equals(direction)
|| MediaDirection.RECVONLY.equals(direction))
&& (MediaDirection.SENDRECV.equals(startedDirection)
|| MediaDirection.RECVONLY.equals(startedDirection)))
{
Iterable<ReceiveStream> receiveStreams;
try
{
receiveStreams = rtpManager.getReceiveStreams();
}
catch (Exception e)
{
/*
* it appears that in early call states, when there are no
* streams, this method could throw a NullPointerException.
* Make sure we handle it gracefully.
*/
logger.trace("Failed to retrieve receive streams", e);
receiveStreams = null;
}
if (receiveStreams != null)
for (ReceiveStream receiveStream : receiveStreams)
try
{
DataSource receiveStreamDataSource
= receiveStream.getDataSource();
/*
* For an unknown reason, the stream DataSource can be
* null at the end of the Call after re-INVITEs have
* been handled.
*/
if (receiveStreamDataSource != null)
receiveStreamDataSource.stop();
}
catch (IOException ioe)
{
logger
.warn(
"Failed to stop stream " + receiveStream,
ioe);
}
getDevice().stop(MediaDirection.RECVONLY);
if (MediaDirection.SENDRECV.equals(startedDirection))
startedDirection = MediaDirection.SENDONLY;
else if (MediaDirection.RECVONLY.equals(startedDirection))
startedDirection = null;
}
}
/*
* Implements ReceiveStreamListener#update(ReceiveStreamEvent).
*/
public void update(ReceiveStreamEvent event)
{
if (event instanceof NewReceiveStreamEvent)
{
ReceiveStream receiveStream = event.getReceiveStream();
if (receiveStream != null)
{
DataSource receiveStreamDataSource
= receiveStream.getDataSource();
if (receiveStreamDataSource != null)
{
Player player = null;
Throwable exception = null;
try
{
player = Manager.createPlayer(receiveStreamDataSource);
}
catch (IOException ioe)
{
exception = ioe;
}
catch (NoPlayerException npe)
{
exception = npe;
}
if (exception != null)
logger
.error(
"Failed to create player for new receive stream "
+ receiveStream,
exception);
else
{
player.addControllerListener(this);
player.realize();
}
}
}
}
}
/*
* Implements SendStreamListener#update(SendStreamEvent).
*/
public void update(SendStreamEvent event)
{
// TODO Auto-generated method stub
}
/*
* Implements SessionListener#update(SessionEvent).
*/
public void update(SessionEvent event)
{
// TODO Auto-generated method stub
}
}

@ -0,0 +1,220 @@
/*
* SIP Communicator, the OpenSource Java VoIP and Instant Messaging client.
*
* Distributable under LGPL license.
* See terms of license at gnu.org.
*/
package net.java.sip.communicator.impl.neomedia;
import java.util.*;
import javax.media.format.*;
import javax.sdp.*;
import net.java.sip.communicator.impl.neomedia.codec.*;
/**
* Implements static utility methods used by media classes.
*
* @author Emil Ivov
*/
public class MediaUtils
{
/**
* Returned by {@link #jmfToSdpEncoding(String)} if it does not know the
* given encoding.
*/
public static final int UNKNOWN_ENCODING = -1;
/**
* Returns the String encoding, as specified in AudioFormat and VideoFormat,
* corresponding to the format specified in <tt>sdpEncodingStr</tt>.
*
* @param sdpEncodingStr the SDP index that we'd like to convert to a JMF
* format.
* @return one of the AudioFormat.XXX or VideoFormat.XXX format strings.
*/
public static String sdpToJmfEncoding(String sdpEncodingStr)
{
int sdpEncoding = UNKNOWN_ENCODING;
try
{
sdpEncoding = Integer.parseInt(sdpEncodingStr);
}
catch (NumberFormatException ex)
{
return null;
}
switch (sdpEncoding)
{
case SdpConstants.PCMU:
return AudioFormat.ULAW_RTP;
case SdpConstants.GSM:
return AudioFormat.GSM_RTP;
case SdpConstants.G723:
return AudioFormat.G723_RTP;
case SdpConstants.DVI4_8000:
return AudioFormat.DVI_RTP;
case SdpConstants.DVI4_16000:
return AudioFormat.DVI_RTP;
case SdpConstants.PCMA:
return Constants.ALAW_RTP;
case 97:
return Constants.ILBC_RTP;
case 98:
return Constants.ILBC_RTP;
case 110:
return Constants.SPEEX_RTP;
case SdpConstants.G728:
return AudioFormat.G728_RTP;
case SdpConstants.G729:
return AudioFormat.G729_RTP;
case SdpConstants.H263:
return VideoFormat.H263_RTP;
case SdpConstants.JPEG:
return VideoFormat.JPEG_RTP;
case SdpConstants.H261:
return VideoFormat.H261_RTP;
case Constants.H264_RTP_SDP:
return Constants.H264_RTP;
default:
return null;
}
}
/**
* Returns the SDP encoding number corresponding to <tt>jmfEncoding</tt>.
*
* @param jmfEncoding one of the AudioFormat.XXX or VideoFormat.XXX format
* strings.
*
* @return the SDP index corresponding to <tt>jmfEncoding</tt>. Returns
* {@link #UNKNOWN_ENCODING} if the encoding is not supported or
* <code>jmfEncoding</code> is <code>null</code>.
*/
public static int jmfToSdpEncoding(String jmfEncoding)
{
if (jmfEncoding == null)
{
return UNKNOWN_ENCODING;
}
else if (jmfEncoding.equals(AudioFormat.ULAW_RTP))
{
return SdpConstants.PCMU;
}
else if (jmfEncoding.equals(Constants.ALAW_RTP))
{
return SdpConstants.PCMA;
}
else if (jmfEncoding.equals(AudioFormat.GSM_RTP))
{
return SdpConstants.GSM;
}
else if (jmfEncoding.equals(AudioFormat.G723_RTP))
{
return SdpConstants.G723;
}
else if (jmfEncoding.equals(AudioFormat.DVI_RTP))
{
return SdpConstants.DVI4_8000;
}
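// Note: the following branch is unreachable because the same
// AudioFormat.DVI_RTP encoding string is matched by the branch above, so
// SdpConstants.DVI4_16000 is never returned for it.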
else if (jmfEncoding.equals(AudioFormat.DVI_RTP))
{
return SdpConstants.DVI4_16000;
}
else if (jmfEncoding.equals(AudioFormat.ALAW))
{
return SdpConstants.PCMA;
}
else if (jmfEncoding.equals(AudioFormat.G728_RTP))
{
return SdpConstants.G728;
}
else if (jmfEncoding.equals(AudioFormat.G729_RTP))
{
return SdpConstants.G729;
}
else if (jmfEncoding.equals(VideoFormat.H263_RTP))
{
return SdpConstants.H263;
}
else if (jmfEncoding.equals(VideoFormat.JPEG_RTP))
{
return SdpConstants.JPEG;
}
else if (jmfEncoding.equals(VideoFormat.H261_RTP))
{
return SdpConstants.H261;
}
else if (jmfEncoding.equals(Constants.H264_RTP))
{
return Constants.H264_RTP_SDP;
}
else if (jmfEncoding.equals(Constants.ILBC))
{
return 97;
}
else if (jmfEncoding.equals(Constants.ILBC_RTP))
{
return 97;
}
else if (jmfEncoding.equals(Constants.SPEEX))
{
return 110;
}
else if (jmfEncoding.equals(Constants.SPEEX_RTP))
{
return 110;
}
else
{
return UNKNOWN_ENCODING;
}
}
/**
     * Converts the list of <tt>sdpEncodings</tt> to a list of JMF-compatible
     * encoding strings as specified by the static fields of VideoFormat and
     * AudioFormat.
*
* @param sdpEncodings a list containing strings representing SDP format
* codes.
* @return a list of strings representing JMF compatible encoding names.
*/
public static List<String> sdpToJmfEncodings(List<String> sdpEncodings)
{
List<String> jmfEncodings = new ArrayList<String>();
if (sdpEncodings != null)
{
for (String sdpEncoding : sdpEncodings)
{
String jmfEncoding = sdpToJmfEncoding(sdpEncoding);
if (jmfEncoding != null)
{
jmfEncodings.add(jmfEncoding);
}
}
}
return jmfEncodings;
}
}
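
An illustrative usage sketch (not part of this commit): the helpers above map between SDP payload types from the static RTP/AVP profile (as exposed by javax.sdp.SdpConstants) and JMF encoding strings; Arrays.asList comes from java.util.

    String jmfEncoding = MediaUtils.sdpToJmfEncoding("0");      // AudioFormat.ULAW_RTP
    int sdpEncoding = MediaUtils.jmfToSdpEncoding(jmfEncoding); // SdpConstants.PCMU, i.e. 0
    // Unknown payload types are silently skipped by sdpToJmfEncodings.
    List<String> jmfEncodings
        = MediaUtils.sdpToJmfEncodings(Arrays.asList("0", "3", "34"));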

@ -0,0 +1,179 @@
/*
* SIP Communicator, the OpenSource Java VoIP and Instant Messaging client.
*
* Distributable under LGPL license.
* See terms of license at gnu.org.
*/
package net.java.sip.communicator.impl.neomedia;
import net.java.sip.communicator.service.configuration.*;
import net.java.sip.communicator.service.fileaccess.*;
import net.java.sip.communicator.service.gui.*;
import net.java.sip.communicator.service.neomedia.*;
import net.java.sip.communicator.service.netaddr.*;
import net.java.sip.communicator.service.resources.*;
import net.java.sip.communicator.util.*;
import org.osgi.framework.*;
/**
* @author Martin Andre
* @author Emil Ivov
* @author Lubomir Marinov
*/
public class NeomediaActivator
implements BundleActivator
{
private final Logger logger = Logger.getLogger(NeomediaActivator.class);
private static BundleContext bundleContext;
private static ConfigurationService configurationService;
private static FileAccessService fileAccessService;
private static MediaServiceImpl mediaServiceImpl;
private static NetworkAddressManagerService networkAddressManagerService;
private static ResourceManagementService resources;
private ServiceRegistration mediaServiceRegistration;
/*
* Implements BundleActivator#start(BundleContext).
*/
public void start(BundleContext context)
throws Exception
{
logger.debug("Started.");
NeomediaActivator.bundleContext = context;
// MediaService
mediaServiceImpl = new MediaServiceImpl();
mediaServiceImpl.start();
mediaServiceRegistration
= context
.registerService(
MediaService.class.getName(),
mediaServiceImpl,
null);
logger.debug("Media Service ... [REGISTERED]");
// MediaConfigurationForm
context
.registerService(
ConfigurationForm.class.getName(),
new LazyConfigurationForm(
"net.java.sip.communicator.impl.neomedia.MediaConfigurationPanel",
getClass().getClassLoader(),
"plugin.mediaconfig.PLUGIN_ICON",
"impl.neomedia.configform.TITLE",
41),
null);
        //We use the NIST SDP stack to parse SDP and we need to set the
        //following property to make sure that it accepts Java-generated
        //IPv6 addresses that contain address scope zones.
System.setProperty("gov.nist.core.STRIP_ADDR_SCOPES", "true");
}
/*
* Implements BundleActivator#stop(BundleContext).
*/
public void stop(BundleContext context)
throws Exception
{
mediaServiceImpl.stop();
mediaServiceRegistration.unregister();
}
/**
* Returns a reference to the bundle context that we were started with.
*
* @return a reference to the BundleContext instance that we were started
* with.
*/
public static BundleContext getBundleContext()
{
return bundleContext;
}
/**
* Returns a reference to a ConfigurationService implementation currently
* registered in the bundle context or null if no such implementation was
* found.
*
* @return a currently valid implementation of the ConfigurationService.
*/
public static ConfigurationService getConfigurationService()
{
if (configurationService == null)
{
ServiceReference confReference
= bundleContext
.getServiceReference(ConfigurationService.class.getName());
configurationService
= (ConfigurationService)
bundleContext.getService(confReference);
}
return configurationService;
}
/**
* Returns a reference to a FileAccessService implementation
* currently registered in the bundle context or null if no such
* implementation was found.
*
     * @return a currently valid implementation of the FileAccessService.
*/
public static FileAccessService getFileAccessService()
{
if (fileAccessService == null && bundleContext != null)
{
ServiceReference faReference
= bundleContext
.getServiceReference(FileAccessService.class.getName());
fileAccessService
= (FileAccessService) bundleContext.getService(faReference);
}
return fileAccessService;
}
/**
* Returns a reference to a NetworkAddressManagerService implementation
* currently registered in the bundle context or null if no such
* implementation was found.
*
     * @return a currently valid implementation of the
     * NetworkAddressManagerService.
*/
public static NetworkAddressManagerService getNetworkAddressManagerService()
{
if (networkAddressManagerService == null)
{
ServiceReference namReference
= bundleContext
.getServiceReference(
NetworkAddressManagerService.class.getName());
networkAddressManagerService
= (NetworkAddressManagerService)
bundleContext.getService(namReference);
}
return networkAddressManagerService;
}
    /**
     * Returns a reference to a ResourceManagementService implementation
     * currently registered in the bundle context or null if no such
     * implementation was found.
     *
     * @return a currently valid implementation of ResourceManagementService.
     */
    public static ResourceManagementService getResources()
{
if (resources == null)
resources
= ResourceManagementServiceUtils.getService(bundleContext);
return resources;
}
}
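
A minimal consumer-side sketch (not part of this commit; bundleContext stands for the consumer bundle's own context): once this activator has run, other bundles can look up the registered MediaService through standard OSGi calls.

    ServiceReference mediaRef
        = bundleContext.getServiceReference(MediaService.class.getName());
    MediaService mediaService = (MediaService) bundleContext.getService(mediaRef);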

@ -0,0 +1,329 @@
/*
* SIP Communicator, the OpenSource Java VoIP and Instant Messaging client.
*
* Distributable under LGPL license.
* See terms of license at gnu.org.
*/
package net.java.sip.communicator.impl.neomedia;
import java.io.*;
import java.net.*;
import javax.media.rtp.*;
import net.java.sip.communicator.service.neomedia.*;
/**
* @author Bing SU (nova.su@gmail.com)
* @author Lubomir Marinov
*/
public class RTPConnectorImpl
implements RTPConnector
{
/**
* The pair of datagram sockets for RTP and RTCP traffic that this instance
* uses in the form of a <tt>StreamConnector</tt>.
*/
private final StreamConnector connector;
/**
* RTCP packet input stream used by <tt>RTPManager</tt>.
*/
private RTPConnectorInputStream controlInputStream;
/**
* RTCP packet output stream used by <tt>RTPManager</tt>.
*/
private RTPConnectorOutputStream controlOutputStream;
/**
* The UDP socket this instance uses to send and receive RTCP packets.
*/
private DatagramSocket controlSocket;
/**
* RTP packet input stream used by <tt>RTPManager</tt>.
*/
private RTPConnectorInputStream dataInputStream;
/**
* RTP packet output stream used by <tt>RTPManager</tt>.
*/
private RTPConnectorOutputStream dataOutputStream;
/**
* The UDP socket this instance uses to send and receive RTP packets.
*/
private DatagramSocket dataSocket;
/**
* Initializes a new <tt>RTPConnectorImpl</tt> which is to use a given pair
* of datagram sockets for RTP and RTCP traffic specified in the form of a
* <tt>StreamConnector</tt>.
*
* @param connector the pair of datagram sockets for RTP and RTCP traffic
* the new instance is to use
*/
public RTPConnectorImpl(StreamConnector connector)
{
if (connector == null)
throw new NullPointerException("connector");
this.connector = connector;
}
/**
     * Adds a stream target. A stream target is the destination address which
     * this RTP session will send its data to. For a single session, we can add
     * multiple SessionAddresses and one copy of the data will be sent to each
     * of them.
*
* @param target Destination target address
* @throws IOException if there was a socket-related error while adding the
* specified target
*/
public void addTarget(SessionAddress target)
throws IOException
{
getControlOutputStream()
.addTarget(
target.getControlAddress(),
target.getControlPort());
getDataOutputStream()
.addTarget(
target.getDataAddress(),
target.getDataPort());
}
/*
* Implements RTPConnector#close().
*/
public void close()
{
dataOutputStream = null;
controlOutputStream = null;
if (dataInputStream != null)
{
dataInputStream.close();
dataInputStream = null;
}
if (controlInputStream != null)
{
controlInputStream.close();
controlInputStream = null;
}
connector.close();
}
/**
* Creates the RTCP packet input stream to be used by <tt>RTPManager</tt>.
*
* @return a new RTCP packet input stream to be used by <tt>RTPManager</tt>
* @throws IOException if an error occurs during the creation of the RTCP
* packet input stream
*/
protected RTPConnectorInputStream createControlInputStream()
throws IOException
{
return new RTPConnectorInputStream(getControlSocket());
}
/**
* Creates the RTCP packet output stream to be used by <tt>RTPManager</tt>.
*
* @return a new RTCP packet output stream to be used by <tt>RTPManager</tt>
* @throws IOException if an error occurs during the creation of the RTCP
* packet output stream
*/
protected RTPConnectorOutputStream createControlOutputStream()
throws IOException
{
return new RTPConnectorOutputStream(getControlSocket());
}
/**
* Creates the RTP packet input stream to be used by <tt>RTPManager</tt>.
*
* @return a new RTP packet input stream to be used by <tt>RTPManager</tt>
* @throws IOException if an error occurs during the creation of the RTP
* packet input stream
*/
protected RTPConnectorInputStream createDataInputStream()
throws IOException
{
return new RTPConnectorInputStream(getDataSocket());
}
/**
* Creates the RTP packet output stream to be used by <tt>RTPManager</tt>.
*
* @return a new RTP packet output stream to be used by <tt>RTPManager</tt>
* @throws IOException if an error occurs during the creation of the RTP
* packet output stream
*/
protected RTPConnectorOutputStream createDataOutputStream()
throws IOException
{
return new RTPConnectorOutputStream(getDataSocket());
}
/*
* Implements RTPConnector#getControlInputStream().
*/
public RTPConnectorInputStream getControlInputStream()
throws IOException
{
if (controlInputStream == null)
controlInputStream = createControlInputStream();
return controlInputStream;
}
/*
* Implements RTPConnector#getControlOutputStream().
*/
public RTPConnectorOutputStream getControlOutputStream()
throws IOException
{
if (controlOutputStream == null)
controlOutputStream = createControlOutputStream();
return controlOutputStream;
}
/**
* Gets the UDP Socket this instance uses to send and receive RTCP packets.
*
* @return the UDP Socket this instance uses to send and receive RTCP
* packets
*/
public DatagramSocket getControlSocket()
{
if (controlSocket == null)
controlSocket = connector.getControlSocket();
return controlSocket;
}
/*
* Implements RTPConnector#getDataInputStream().
*/
public RTPConnectorInputStream getDataInputStream()
throws IOException
{
if (dataInputStream == null)
dataInputStream = createDataInputStream();
return dataInputStream;
}
/*
* Implements RTPConnector#getDataOutputStream().
*/
public RTPConnectorOutputStream getDataOutputStream()
throws IOException
{
if (dataOutputStream == null)
dataOutputStream = createDataOutputStream();
return dataOutputStream;
}
/**
* Gets the UDP socket this instance uses to send and receive RTP packets.
*
* @return the UDP socket this instance uses to send and receive RTP packets
*/
public DatagramSocket getDataSocket()
{
if (dataSocket == null)
dataSocket = connector.getDataSocket();
return dataSocket;
}
/*
* Implements RTPConnector#getReceiveBufferSize().
*/
public int getReceiveBufferSize()
{
// Not applicable
return -1;
}
/*
* Implements RTPConnector#getRTCPBandwidthFraction().
*/
public double getRTCPBandwidthFraction()
{
// Not applicable
return -1;
}
/*
* Implements RTPConnector#getRTCPSenderBandwidthFraction().
*/
public double getRTCPSenderBandwidthFraction()
{
// Not applicable
return -1;
}
/*
* Implements RTPConnector#getSendBufferSize().
*/
public int getSendBufferSize()
{
// Not applicable
return -1;
}
/**
* Removes a target from our session. If a target is removed, there will be
* no data sent to that address.
*
* @param target Destination target to be removed
*/
public void removeTarget(SessionAddress target)
{
if (controlOutputStream != null)
controlOutputStream
.removeTarget(
target.getControlAddress(),
target.getControlPort());
if (dataOutputStream != null)
dataOutputStream
.removeTarget(
target.getDataAddress(),
target.getDataPort());
}
/**
     * Removes all stream targets. After this operation is done, there will be
     * no targets receiving data, so no data will be sent.
*/
public void removeTargets()
{
if (controlOutputStream != null)
controlOutputStream.removeTargets();
if (dataOutputStream != null)
dataOutputStream.removeTargets();
}
/*
* Implements RTPConnector#setReceiveBufferSize(int).
*/
public void setReceiveBufferSize(int size)
throws IOException
{
// Nothing should be done here :-)
}
/*
* Implements RTPConnector#setSendBufferSize(int).
*/
public void setSendBufferSize(int size)
throws IOException
{
// Nothing should be done here :-)
}
}
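
An illustrative wiring sketch (not part of this commit; streamConnector, remoteAddr, rtpPort and rtcpPort are assumed to be obtained elsewhere): the connector is handed to a JMF RTPManager, which then performs all RTP/RTCP I/O through the streams above.

    RTPManager rtpManager = RTPManager.newInstance();
    RTPConnectorImpl rtpConnector = new RTPConnectorImpl(streamConnector);
    rtpManager.initialize(rtpConnector);
    // RTP goes to remoteAddr:rtpPort, RTCP to remoteAddr:rtcpPort.
    rtpConnector.addTarget(
        new SessionAddress(remoteAddr, rtpPort, remoteAddr, rtcpPort));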

@ -0,0 +1,215 @@
/*
* SIP Communicator, the OpenSource Java VoIP and Instant Messaging client.
*
* Distributable under LGPL license.
* See terms of license at gnu.org.
*/
package net.java.sip.communicator.impl.neomedia;
import java.io.*;
import java.net.*;
import javax.media.protocol.*;
/**
* @author Bing SU (nova.su@gmail.com)
* @author Lubomir Marinov
*/
public class RTPConnectorInputStream
implements PushSourceStream,
Runnable
{
/**
* The value of the property <tt>controls</tt> of
* <tt>RTPConnectorInputStream</tt> when there are no controls. Explicitly
* defined in order to reduce unnecessary allocations.
*/
private static final Object[] EMPTY_CONTROLS = new Object[0];
/**
* Packet receive buffer
*/
private final byte[] buffer = new byte[65535];
/**
* Whether this stream is closed. Used to control the termination of worker
* thread.
*/
private boolean closed;
/**
* Caught an IO exception during read from socket
*/
private boolean ioError = false;
/**
* The packet data to be read out of this instance through its
* {@link #read(byte[], int, int)} method.
*/
private RawPacket pkt;
/**
* UDP socket used to receive data.
*/
private final DatagramSocket socket;
/**
* SourceTransferHandler object which is used to read packets.
*/
private SourceTransferHandler transferHandler;
/**
* Initializes a new <tt>RTPConnectorInputStream</tt> which is to receive
* packet data from a specific UDP socket.
*
* @param socket the UDP socket the new instance is to receive data from
*/
public RTPConnectorInputStream(DatagramSocket socket)
{
this.socket = socket;
closed = false;
new Thread(this).start();
}
/**
* Close this stream, stops the worker thread.
*/
public synchronized void close()
{
closed = true;
socket.close();
}
/**
* Creates a new <tt>RawPacket</tt> from a specific <tt>DatagramPacket</tt>
* in order to have this instance receive its packet data through its
* {@link #read(byte[], int, int)} method. Allows extenders to intercept the
* packet data and possibly filter and/or modify it.
*
* @param datagramPacket the <tt>DatagramPacket</tt> containing the packet
* data
* @return a new <tt>RawPacket</tt> containing the packet data of the
* specified <tt>DatagramPacket</tt> or possibly its modification;
* <tt>null</tt> to ignore the packet data of the specified
* <tt>DatagramPacket</tt> and not make it available to this instance
* through its {@link #read(byte[], int, int)} method
*/
protected RawPacket createRawPacket(DatagramPacket datagramPacket)
{
return
new RawPacket(
datagramPacket.getData(),
datagramPacket.getOffset(),
datagramPacket.getLength());
}
/*
* Implements SourceStream#endOfStream().
*/
public boolean endOfStream()
{
return false;
}
/*
* Implements SourceStream#getContentDescriptor().
*/
public ContentDescriptor getContentDescriptor()
{
return null;
}
/*
* Implements SourceStream#getContentLength().
*/
public long getContentLength()
{
return LENGTH_UNKNOWN;
}
/*
* Implements Controls#getControl(String).
*/
public Object getControl(String controlType)
{
return null;
}
/*
* Implements Controls#getControls().
*/
public Object[] getControls()
{
return EMPTY_CONTROLS;
}
/*
* Implements PushSourceStream#getMinimumTransferSize().
*/
public int getMinimumTransferSize()
{
        return 2 * 1024; // more than a typical Ethernet MTU, just to be safe.
}
/*
* Implements PushSourceStream#read(byte[], int, int).
*/
public int read(byte[] inBuffer, int offset, int length)
throws IOException
{
if (ioError)
return -1;
int pktLength = pkt.getLength();
if (length < pktLength)
throw
new IOException("Input buffer not big enough for " + pktLength);
System
.arraycopy(
pkt.getBuffer(), pkt.getOffset(), inBuffer, offset, pktLength);
return pktLength;
}
/*
* Implements Runnable#run().
*/
public void run()
{
while (!closed)
{
DatagramPacket p = new DatagramPacket(buffer, 0, 65535);
try
{
socket.receive(p);
}
catch (IOException e)
{
ioError = true;
break;
}
pkt = createRawPacket(p);
/*
* If we got extended, the delivery of the packet may have been
* canceled.
*/
if ((pkt != null) && (transferHandler != null) && !closed)
transferHandler.transferData(this);
}
}
/*
* Implements PushSourceStream#setTransferHandler(SourceTransferHandler).
*/
public void setTransferHandler(SourceTransferHandler transferHandler)
{
if (!closed)
this.transferHandler = transferHandler;
}
}
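
An illustrative extension sketch (not part of this commit; the class name is hypothetical): createRawPacket is the hook extenders can override to filter or transform incoming packets, and returning null makes run() skip the packet.

    public class FilteringRTPConnectorInputStream
        extends RTPConnectorInputStream
    {
        public FilteringRTPConnectorInputStream(DatagramSocket socket)
        {
            super(socket);
        }

        protected RawPacket createRawPacket(DatagramPacket datagramPacket)
        {
            // Drop anything shorter than the minimal 12-byte RTP header.
            if (datagramPacket.getLength() < 12)
                return null;
            return super.createRawPacket(datagramPacket);
        }
    }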

@ -0,0 +1,141 @@
/*
* SIP Communicator, the OpenSource Java VoIP and Instant Messaging client.
*
* Distributable under LGPL license.
* See terms of license at gnu.org.
*/
package net.java.sip.communicator.impl.neomedia;
import java.io.*;
import java.net.*;
import java.util.*;
import javax.media.rtp.*;
/**
* @author Bing SU (nova.su@gmail.com)
* @author Lubomir Marinov
*/
public class RTPConnectorOutputStream
implements OutputDataStream
{
/**
* UDP socket used to send packet data
*/
private final DatagramSocket socket;
/**
* Stream targets' ip addresses and ports.
*/
protected final List<InetSocketAddress> targets
= new LinkedList<InetSocketAddress>();
/**
* Initializes a new <tt>RTPConnectorOutputStream</tt> which is to send
* packet data out through a specific UDP socket.
*
* @param socket the UDP socket used to send packet data out
*/
public RTPConnectorOutputStream(DatagramSocket socket)
{
this.socket = socket;
}
/**
* Add a target to stream targets list
*
* @param remoteAddr target ip address
* @param remotePort target port
*/
public void addTarget(InetAddress remoteAddr, int remotePort)
{
targets.add(new InetSocketAddress(remoteAddr, remotePort));
}
/**
* Creates a new <tt>RawPacket</tt> from a specific <tt>byte[]</tt> buffer
* in order to have this instance send its packet data through its
* {@link #write(byte[], int, int)} method. Allows extenders to intercept
* the packet data and possibly filter and/or modify it.
*
* @param buffer the packet data to be sent to the targets of this instance
* @param offset the offset of the packet data in <tt>buffer</tt>
* @param length the length of the packet data in <tt>buffer</tt>
* @return a new <tt>RawPacket</tt> containing the packet data of the
* specified <tt>byte[]</tt> buffer or possibly its modification;
* <tt>null</tt> to ignore the packet data of the specified <tt>byte[]</tt>
* buffer and not send it to the targets of this instance through its
* {@link #write(byte[], int, int)} method
*/
protected RawPacket createRawPacket(byte[] buffer, int offset, int length)
{
return new RawPacket(buffer, offset, length);
}
/**
* Remove a target from stream targets list
*
* @param remoteAddr target ip address
* @param remotePort target port
     * @return true if the target was in the stream target list and has been
     * removed; false otherwise
*/
public boolean removeTarget(InetAddress remoteAddr, int remotePort)
{
for (Iterator<InetSocketAddress> targetIter = targets.iterator();
targetIter.hasNext();)
{
InetSocketAddress target = targetIter.next();
if (target.getAddress().equals(remoteAddr)
&& (target.getPort() == remotePort))
{
targetIter.remove();
return true;
}
}
return false;
}
/**
* Remove all stream targets from this session.
*/
public void removeTargets()
{
targets.clear();
}
/*
* Implements OutputDataStream#write(byte[], int, int).
*/
public int write(byte[] buffer, int offset, int length)
{
RawPacket pkt = createRawPacket(buffer, offset, length);
/*
* If we got extended, the delivery of the packet may have been
* canceled.
*/
if (pkt == null)
return length;
for (InetSocketAddress target : targets)
try
{
socket
.send(
new DatagramPacket(
pkt.getBuffer(),
pkt.getOffset(),
pkt.getLength(),
target.getAddress(),
target.getPort()));
}
catch (IOException ex)
{
// TODO error handling
return -1;
}
return length;
}
}
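
A short usage sketch (not part of this commit; socket, remoteAddr and remotePort are assumed): write() fans the same packet out to every registered target.

    RTPConnectorOutputStream out = new RTPConnectorOutputStream(socket);
    out.addTarget(remoteAddr, remotePort);
    out.write(packetBytes, 0, packetBytes.length); // returns the length on success, -1 on I/O error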

@ -0,0 +1,239 @@
/*
* SIP Communicator, the OpenSource Java VoIP and Instant Messaging client.
*
* Distributable under LGPL license.
* See terms of license at gnu.org.
*/
package net.java.sip.communicator.impl.neomedia;
/**
 * When using TransformConnector, an RTP/RTCP packet is represented using
 * RawPacket. RawPacket stores the buffer holding the RTP/RTCP packet, as well
 * as the inner offset and length of the RTP/RTCP packet data.
 *
 * After transformation, data is also stored in RawPacket objects, either the
 * original RawPacket (in-place transformation) or a newly created RawPacket.
 *
 * Besides packet info storage, RawPacket also provides some other operations
 * such as readInt() to ease the development process.
*
* @author Werner Dittmann (Werner.Dittmann@t-online.de)
* @author Bing SU (nova.su@gmail.com)
*/
public class RawPacket
{
/**
* Byte array storing the content of this Packet
*/
protected byte[] buffer;
/**
     * Start offset of the packet data inside buffer.
     * Usually this value would be 0 but, in order to be compatible with
     * RTPManager, we store it instead of assuming that the offset is always
     * zero.
*/
protected int offset;
/**
* Length of this packet's data
*/
protected int length;
/**
     * Constructs a RawPacket using the specified values.
*
* @param buffer Byte array holding the content of this Packet
* @param offset Start offset of packet content inside buffer
* @param length Length of the packet's data
*/
public RawPacket(byte[] buffer, int offset, int length)
{
this.buffer = buffer;
this.offset = offset;
this.length = length;
}
/**
* Get buffer containing the content of this packet
*
* @return buffer containing the content of this packet
*/
public byte[] getBuffer()
{
return this.buffer;
}
/**
* Get the length of this packet's data
*
* @return length of this packet's data
*/
public int getLength()
{
return this.length;
}
/**
* Get the start offset of this packet's data inside storing buffer
*
* @return start offset of this packet's data inside storing buffer
*/
public int getOffset()
{
return this.offset;
}
/**
     * Reads an integer from this packet at the specified offset.
     *
     * @param off start offset of the integer to be read
     * @return the integer read from the packet at the specified offset
*/
public int readInt(int off)
{
return (this.buffer[this.offset + off + 0] << 24) |
((this.buffer[this.offset + off + 1] & 0xff) << 16) |
((this.buffer[this.offset + off + 2] & 0xff) << 8) |
(this.buffer[this.offset + off + 3] & 0xff);
}
/**
* Read a short from this packet at specified offset
*
* @param off start offset of this short
* @return short value at offset
*/
public short readShort(int off)
{
return (short) ((this.buffer[this.offset + off + 0] << 8) |
(this.buffer[this.offset + off + 1] & 0xff));
}
/**
     * Reads an unsigned short at the specified offset as an int.
*
* @param off start offset of the unsigned short
* @return the int value of the unsigned short at offset
*/
public int readUnsignedShortAsInt(int off)
{
int b1 = (0x000000FF & (this.buffer[this.offset + off + 0]));
int b2 = (0x000000FF & (this.buffer[this.offset + off + 1]));
int val = b1 << 8 | b2;
return val;
}
/**
* Read a byte from this packet at specified offset
*
* @param off start offset of the byte
* @return byte at offset
*/
public byte readByte(int off)
{
return buffer[offset + off];
}
/**
* Read an unsigned integer as long at specified offset
*
* @param off start offset of this unsigned integer
* @return unsigned integer as long at offset
*/
public long readUnsignedIntAsLong(int off)
{
int b0 = (0x000000FF & (this.buffer[this.offset + off + 0]));
int b1 = (0x000000FF & (this.buffer[this.offset + off + 1]));
int b2 = (0x000000FF & (this.buffer[this.offset + off + 2]));
int b3 = (0x000000FF & (this.buffer[this.offset + off + 3]));
return ((b0 << 24 | b1 << 16 | b2 << 8 | b3)) & 0xFFFFFFFFL;
}
/**
* Read a byte region from specified offset with specified length
*
* @param off start offset of the region to be read
* @param len length of the region to be read
* @return byte array of [offset, offset + length)
*/
public byte[] readRegion(int off, int len)
{
int startOffset = this.offset + off;
if (off < 0 || len <= 0
|| startOffset + len > this.buffer.length)
{
return null;
}
byte[] region = new byte[len];
System.arraycopy(this.buffer, startOffset, region, 0, len);
return region;
}
/**
     * Reads a byte region from the specified offset with the specified length
     * into the given buffer.
*
* @param off start offset of the region to be read
* @param len length of the region to be read
* @param outBuff output buffer
*/
public void readRegionToBuff(int off, int len, byte[] outBuff)
{
int startOffset = this.offset + off;
if (off < 0 || len <= 0
|| startOffset + len > this.buffer.length)
{
return;
}
if (outBuff.length < len)
{
return;
}
System.arraycopy(this.buffer, startOffset, outBuff, 0, len);
}
/**
     * Appends a byte array to the end of the packet. This will change the data
* buffer of this packet.
*
* @param data byte array to append
* @param len the number of bytes to append
*/
public void append(byte[] data, int len)
{
if (data == null || len == 0)
{
return;
}
byte[] newBuffer = new byte[this.length + len];
System.arraycopy(this.buffer, this.offset, newBuffer, 0, this.length);
System.arraycopy(data, 0, newBuffer, this.length, len);
this.offset = 0;
this.length = this.length + len;
this.buffer = newBuffer;
}
/**
     * Shrinks the length of this packet's data by the specified number of
     * bytes.
     *
     * @param len the number of bytes to shrink the data by
*/
public void shrink(int len)
{
if (len <= 0)
{
return;
}
this.length -= len;
if (this.length < 0)
{
this.length = 0;
}
}
}
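
An illustrative sketch (not part of this commit) of how the read helpers map onto the fixed 12-byte RTP header, assuming pkt wraps a standard RTP packet.

    int payloadType = pkt.readByte(1) & 0x7F;            // PT: low 7 bits of byte 1
    int sequenceNumber = pkt.readUnsignedShortAsInt(2);  // bytes 2-3
    long timestamp = pkt.readUnsignedIntAsLong(4);       // bytes 4-7
    long ssrc = pkt.readUnsignedIntAsLong(8);            // bytes 8-11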

@ -0,0 +1,172 @@
/*
* SIP Communicator, the OpenSource Java VoIP and Instant Messaging client.
*
* Distributable under LGPL license.
* See terms of license at gnu.org.
*/
package net.java.sip.communicator.impl.neomedia;
import java.awt.*;
import java.util.*;
import javax.media.*;
import javax.media.control.*;
import javax.media.format.*;
import javax.media.protocol.*;
import javax.media.rtp.*;
import net.java.sip.communicator.impl.neomedia.codec.*;
import net.java.sip.communicator.service.neomedia.*;
import net.java.sip.communicator.service.neomedia.device.*;
import net.java.sip.communicator.service.neomedia.event.*;
import net.java.sip.communicator.util.*;
/**
* @author Lubomir Marinov
*/
public class VideoMediaStreamImpl
extends MediaStreamImpl
implements VideoMediaStream
{
private static final Logger logger
= Logger.getLogger(VideoMediaStreamImpl.class);
/**
* JMF stores <tt>CUSTOM_CODEC_FORMATS</tt> statically, so they only need to
* be registered once. FMJ does this dynamically (per instance), so it needs
     * to be done every time we instantiate an RTP manager.
*/
private static boolean formatsRegisteredOnce = false;
public static Dimension selectVideoSize(
DataSource videoDS,
final int preferredWidth,
final int preferredHeight)
{
if(videoDS == null)
return null;
FormatControl formatControl =
(FormatControl) videoDS.getControl(FormatControl.class.getName());
if (formatControl == null)
return null;
Format[] formats = formatControl.getSupportedFormats();
final int count = formats.length;
if (count < 1)
return null;
Format selectedFormat = null;
if (count == 1)
selectedFormat = formats[0];
else
{
class FormatInfo
{
public final VideoFormat format;
public final double difference;
public FormatInfo(VideoFormat format)
{
this.format = format;
Dimension size = format.getSize();
int width = size.width;
double xScale =
(width == preferredWidth)
? 1
: (preferredWidth / (double) width);
int height = size.height;
double yScale =
(height == preferredHeight)
? 1
: (preferredHeight / (double) height);
difference = Math.abs(1 - Math.min(xScale, yScale));
}
}
FormatInfo[] infos = new FormatInfo[count];
for (int i = 0; i < count; i++)
{
FormatInfo info =
infos[i] = new FormatInfo((VideoFormat) formats[i]);
if (info.difference == 0)
{
selectedFormat = info.format;
break;
}
}
if (selectedFormat == null)
{
Arrays.sort(infos, new Comparator<FormatInfo>()
{
public int compare(FormatInfo info0, FormatInfo info1)
{
return Double.compare(info0.difference,
info1.difference);
}
});
selectedFormat = infos[0].format;
}
}
formatControl.setFormat(selectedFormat);
return ((VideoFormat) selectedFormat).getSize();
}
public VideoMediaStreamImpl(StreamConnector connector, MediaDevice device)
{
super(connector, device);
}
/*
* Implements VideoMediaStream#addVideoListener(VideoListener).
*/
public void addVideoListener(VideoListener listener)
{
// TODO Auto-generated method stub
}
/*
* Implements VideoMediaStream#getVisualComponent().
*/
public Component getVisualComponent()
{
// TODO Auto-generated method stub
return null;
}
protected void registerCustomCodecFormats(RTPManager rtpManager)
{
// if we have already registered custom formats and we are running JMF
// we bail out.
if (!FMJConditionals.REGISTER_FORMATS_WITH_EVERY_RTP_MANAGER
&& formatsRegisteredOnce)
return;
Format format = new VideoFormat(Constants.H264_RTP);
logger.debug("registering format " + format + " with RTP manager");
/*
* NOTE (mkoch@rowa.de): com.sun.media.rtp.RtpSessionMgr.addFormat leaks
* memory, since it stores the Format in a static Vector. AFAIK there is
* no easy way around it, but the memory impact should not be too bad.
*/
rtpManager
.addFormat(
format,
MediaUtils.jmfToSdpEncoding(format.getEncoding()));
formatsRegisteredOnce = true;
}
}
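
An illustrative usage sketch (not part of this commit; videoDataSource is assumed to come from a video capture device): selectVideoSize picks the supported VideoFormat whose dimensions are closest to the preferred ones and applies it through the DataSource's FormatControl.

    Dimension size
        = VideoMediaStreamImpl.selectVideoSize(videoDataSource, 352, 288);
    // size is the closest resolution the device supports (CIF if available),
    // or null if the DataSource exposes no FormatControl.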

@ -6,18 +6,22 @@
*/
package net.java.sip.communicator.impl.neomedia.device;
import java.awt.*;
import java.io.*;
import java.util.*;
import java.util.List;
import javax.media.*;
import javax.media.control.*;
import javax.media.format.*;
import javax.media.protocol.*;
import net.java.sip.communicator.impl.media.*;
import net.java.sip.communicator.impl.neomedia.*;
import net.java.sip.communicator.impl.neomedia.format.*;
import net.java.sip.communicator.service.neomedia.*;
import net.java.sip.communicator.service.neomedia.device.*;
import net.java.sip.communicator.service.neomedia.format.*;
import net.java.sip.communicator.util.*;
/**
* Implements <tt>MediaDevice</tt> for the JMF <tt>CaptureDevice</tt>.
@ -27,24 +31,38 @@
public class CaptureMediaDevice
implements MediaDevice
{
private static final Logger logger
= Logger.getLogger(CaptureMediaDevice.class);
/**
* The JMF <tt>CaptureDevice</tt> this instance wraps and provides an
* implementation of <tt>MediaDevice</tt> for.
*/
private CaptureDevice captureDevice;
private DataSource captureDevice;
/**
* The <tt>CaptureDeviceInfo</tt> of {@link #captureDevice}.
*/
private final CaptureDeviceInfo captureDeviceInfo;
/**
* The indicator which determines whether {@link DataSource#connect()} has
* been successfully executed on {@link #captureDevice}.
*/
private boolean captureDeviceIsConnected;
/**
* The <tt>MediaType</tt> of this instance and the <tt>CaptureDevice</tt>
* that it wraps.
*/
private final MediaType mediaType;
/**
* The JMF <tt>Processor</tt> which transcodes {@link #captureDevice} into
* the format of this instance.
*/
private Processor processor;
/**
* Initializes a new <tt>CaptureMediaDevice</tt> instance which is to
* provide an implementation of <tt>MediaDevice</tt> for a specific
@ -61,7 +79,7 @@ public CaptureMediaDevice(CaptureDevice captureDevice, MediaType mediaType)
if (mediaType == null)
throw new NullPointerException("mediaType");
this.captureDevice = captureDevice;
this.captureDevice = (DataSource) captureDevice;
this.captureDeviceInfo = captureDevice.getCaptureDeviceInfo();
this.mediaType = mediaType;
}
@ -91,6 +109,115 @@ public CaptureMediaDevice(
this.mediaType = mediaType;
}
/**
* For JPEG and H263, we know that they only work for particular
* sizes. So we'll perform extra checking here to make sure they
* are of the right sizes.
*
* @param sourceFormat the original format that we'd like to check for
* size.
* @return the modified <tt>VideoFormat</tt> set to the size we support.
*/
private VideoFormat assertSize(VideoFormat sourceFormat)
{
int width, height;
// JPEG
if (sourceFormat.matches(new Format(VideoFormat.JPEG_RTP)))
{
Dimension size = sourceFormat.getSize();
// For JPEG, make sure width and height are divisible by 8.
width = (size.width % 8 == 0)
? size.width
: ( ( (size.width / 8)) * 8);
height = (size.height % 8 == 0)
? size.height
: (size.height / 8) * 8;
}
// H.263
else if (sourceFormat.matches(new Format(VideoFormat.H263_RTP)))
{
// For H.263, we only support some specific sizes.
//if (size.width < 128)
// {
// width = 128;
// height = 96;
// }
//else if (size.width < 176)
// {
// width = 176;
// height = 144;
// }
//else
// {
width = 352;
height = 288;
// }
}
else
{
// We don't know this particular format. We'll just
// leave it alone then.
return sourceFormat;
}
VideoFormat result = new VideoFormat(null,
new Dimension(width, height),
Format.NOT_SPECIFIED,
null,
Format.NOT_SPECIFIED);
return (VideoFormat) result.intersects(sourceFormat);
}
public void close()
{
if (captureDevice != null)
{
/*
* As reported by Carlos Alexandre, stopping before disconnecting
* resolves a slow disconnect on Linux.
*/
try
{
captureDevice.stop();
}
catch (IOException ex)
{
/*
* We cannot do much about the exception because we're not
* really interested in the stopping but rather in calling
* DataSource#disconnect() anyway.
*/
logger.error("Failed to properly stop avDataSource.", ex);
}
captureDevice.disconnect();
}
if (processor != null)
{
processor.stop();
if (processor.getState() == Processor.Realized)
{
DataSource dataOutput = processor.getDataOutput();
if (dataOutput != null)
dataOutput.disconnect();
}
processor.deallocate();
processor.close();
}
}
private Format findFirstMatchingFormat(Format[] formats, Format format)
{
for (Format match : formats)
if (match.isSameEncoding(format))
return match;
return null;
}
/**
* Gets the JMF <tt>CaptureDevice</tt> this instance wraps and provides an
* implementation of <tt>MediaDevice</tt> for.
@ -98,24 +225,117 @@ public CaptureMediaDevice(
* @return the JMF <tt>CaptureDevice</tt> this instance wraps and provides
* an implementation of <tt>MediaDevice</tt> for
*/
private CaptureDevice getCaptureDevice()
private DataSource getCaptureDevice()
{
if (captureDevice == null)
captureDevice
= (CaptureDevice)
MediaControl
.createDataSource(
captureDeviceInfo.getLocator());
{
try
{
captureDevice
= Manager.createDataSource(captureDeviceInfo.getLocator());
}
catch (IOException ioe)
{
// TODO
}
catch (NoDataSourceException ndse)
{
// TODO
}
}
return captureDevice;
}
/**
* Gets the JMF <tt>CaptureDevice</tt> this instance wraps and provides an
* implementation of <tt>MediaDevice</tt> for in a connected state. If the
* <tt>CaptureDevice</tt> is not connected to yet, first tries to connect to
* it. Returns <tt>null</tt> if this instance has failed to create a
* <tt>CaptureDevice</tt> instance or to connect to it.
*
* @return the JMF <tt>CaptureDevice</tt> this instance wraps and provides
* an implementation of <tt>MediaDevice</tt> for in a connected state;
* <tt>null</tt> if this instance has failed to create a
* <tt>CaptureDevice</tt> instance or to connect to it
*/
private DataSource getConnectedCaptureDevice()
{
DataSource captureDevice = getCaptureDevice();
if ((captureDevice != null) && !captureDeviceIsConnected)
{
Throwable exception = null;
try
{
captureDevice.connect();
}
catch (IOException ioe)
{
// TODO
exception = ioe;
}
catch (NullPointerException npe)
{
/*
* TODO The old media says it happens when the operating system
* does not support the operation.
*/
exception = npe;
}
if (exception == null)
{
captureDeviceIsConnected = true;
/*
* 1. Changing buffer size. The default buffer size (for
* javasound) is 125 milliseconds - 1/8 sec. On MacOS this leads
* to an exception and no audio capture. A value of 30 for the
* buffer fixes the problem and is OK when using some pstn
* gateways.
*
* 2. Changing to 60. When it is 30 there are some issues with
* asterisk and nat (we don't start to send stream and so
* asterisk rtp part doesn't notice that we are behind nat)
*
* 3. Do not set buffer length on linux as it completely breaks
* audio capture.
*/
String osName = System.getProperty("os.name");
if ((osName == null) || !osName.toLowerCase().contains("linux"))
{
Control bufferControl
= (Control)
captureDevice
.getControl(
"javax.media.control.BufferControl");
if (bufferControl != null)
((BufferControl) bufferControl)
.setBufferLength(60); // in milliseconds
}
}
else
captureDevice = null;
}
return captureDevice;
}
public DataSource getDataSource()
{
Processor processor = getProcessor();
return (processor == null) ? null : processor.getDataOutput();
}
/*
* Implements MediaDevice#getDirection(). Because CaptureDevice can only be
* read from, returns MediaDirection#SENDONLY.
* Implements MediaDevice#getDirection().
*/
public MediaDirection getDirection()
{
return MediaDirection.SENDONLY;
return MediaDirection.SENDRECV;
}
/*
@ -123,16 +343,23 @@ public MediaDirection getDirection()
*/
public MediaFormat getFormat()
{
MediaType mediaType = getMediaType();
Processor processor = getProcessor();
for (FormatControl formatControl
: getCaptureDevice().getFormatControls())
if (processor != null)
{
MediaFormat format
= MediaFormatImpl.createInstance(formatControl.getFormat());
MediaType mediaType = getMediaType();
for (TrackControl trackControl : processor.getTrackControls())
{
if (!trackControl.isEnabled())
continue;
if ((format != null) && format.getMediaType().equals(mediaType))
return format;
MediaFormat format
= MediaFormatImpl.createInstance(trackControl.getFormat());
if ((format != null) && format.getMediaType().equals(mediaType))
return format;
}
}
return null;
}
@ -145,80 +372,231 @@ public MediaType getMediaType()
return mediaType;
}
/**
* Gets the JMF <tt>Processor</tt> which transcodes the
* <tt>CaptureDevice</tt> wrapped by this instance into the format of this
* instance.
*
* @return the JMF <tt>Processor</tt> which transcodes the
* <tt>CaptureDevice</tt> wrapped by this instance into the format of this
* instance
*/
private Processor getProcessor()
{
if (processor == null)
{
DataSource captureDevice = getConnectedCaptureDevice();
if (captureDevice != null)
{
Processor processor = null;
try
{
processor = Manager.createProcessor(captureDevice);
}
catch (IOException ioe)
{
// TODO
}
catch (NoProcessorException npe)
{
// TODO
}
if (waitForState(processor, Processor.Configured))
{
try
{
processor
.setContentDescriptor(
new ContentDescriptor(
ContentDescriptor.RAW_RTP));
}
catch (NotConfiguredError nce)
{
// TODO
processor = null;
}
if (processor != null)
this.processor = processor;
}
else
processor = null;
}
}
return processor;
}
/*
* Implements MediaDevice#getSupportedFormats().
*/
public List<MediaFormat> getSupportedFormats()
{
return
(captureDevice == null)
? getSupportedFormats(captureDeviceInfo)
: getSupportedFormats(captureDevice);
Processor processor = getProcessor();
Set<Format> supportedFormats = new HashSet<Format>();
if (processor != null)
{
MediaType mediaType = getMediaType();
for (TrackControl trackControl : processor.getTrackControls())
{
if (!trackControl.isEnabled())
continue;
for (Format supportedFormat : trackControl.getSupportedFormats())
switch (mediaType)
{
case AUDIO:
if (supportedFormat instanceof AudioFormat)
supportedFormats.add(supportedFormat);
break;
case VIDEO:
if (supportedFormat instanceof VideoFormat)
supportedFormats.add(supportedFormat);
break;
}
}
}
List<MediaFormat> supportedMediaFormats
= new ArrayList<MediaFormat>(supportedFormats.size());
for (Format format : supportedFormats)
supportedMediaFormats.add(MediaFormatImpl.createInstance(format));
return supportedMediaFormats;
}
/**
* Gets the <tt>MediaFormat</tt>s supported by a specific
* <tt>CaptureDevice</tt>.
* Sets the <tt>MediaFormat</tt> in which this <tt>MediaDevice</tt> is to
* capture data.
*
* @param captureDevice the JMF <tt>CaptureDevice</tt> to retrieve the
* supported <tt>MediaFormat</tt>s of
* @return the <tt>MediaFormat</tt>s supported by the specified
* <tt>CaptureDevice</tt>
* @param format the <tt>MediaFormat</tt> in which this <tt>MediaDevice</tt>
* is to capture data
*/
private List<MediaFormat> getSupportedFormats(CaptureDevice captureDevice)
public void setFormat(MediaFormat format)
{
MediaType mediaType = getMediaType();
Set<Format> supportedFormats = new HashSet<Format>();
for (FormatControl formatControl : captureDevice.getFormatControls())
if (!mediaType.equals(format.getMediaType()))
throw new IllegalArgumentException("format");
/*
* We need javax.media.Format and we know how to convert MediaFormat to
* it only for MediaFormatImpl so assert early.
*/
MediaFormatImpl mediaFormatImpl = (MediaFormatImpl) format;
Processor processor = getProcessor();
if (processor != null)
{
for (Format format : formatControl.getSupportedFormats())
if ((processor.getState() < Processor.Configured)
&& !waitForState(processor, Processor.Configured))
{
// TODO
return;
}
for (TrackControl trackControl : processor.getTrackControls())
{
if (!trackControl.isEnabled())
continue;
Format[] supportedFormats = trackControl.getSupportedFormats();
if ((supportedFormats == null) || (supportedFormats.length < 1))
{
trackControl.setEnabled(false);
continue;
}
Format supportedFormat = null;
switch (mediaType)
{
case AUDIO:
if (format instanceof AudioFormat)
supportedFormats.add(format);
if (supportedFormats[0] instanceof AudioFormat)
{
if (FMJConditionals.FORCE_AUDIO_FORMAT != null)
trackControl
.setFormat(FMJConditionals.FORCE_AUDIO_FORMAT);
else
{
supportedFormat
= findFirstMatchingFormat(
supportedFormats,
mediaFormatImpl.getFormat());
}
}
break;
case VIDEO:
if (format instanceof VideoFormat)
supportedFormats.add(format);
if (supportedFormats[0] instanceof VideoFormat)
{
supportedFormat
= findFirstMatchingFormat(
supportedFormats,
mediaFormatImpl.getFormat());
if (supportedFormat != null)
supportedFormat
= assertSize((VideoFormat) supportedFormat);
}
break;
}
if (supportedFormat == null)
trackControl.setEnabled(false);
else
trackControl.setFormat(supportedFormat);
}
}
}
List<MediaFormat> supportedMediaFormats
= new ArrayList<MediaFormat>(supportedFormats.size());
public void start(MediaDirection direction)
{
if (direction == null)
throw new IllegalArgumentException("direction");
for (Format format : supportedFormats)
supportedMediaFormats.add(MediaFormatImpl.createInstance(format));
return supportedMediaFormats;
if (MediaDirection.SENDRECV.equals(direction)
|| MediaDirection.SENDONLY.equals(direction))
{
Processor processor = getProcessor();
if ((processor != null)
&& (processor.getState() != Processor.Started))
processor.start();
}
}
public void stop(MediaDirection direction)
{
if (direction == null)
throw new IllegalArgumentException("direction");
if (MediaDirection.SENDRECV.equals(direction)
|| MediaDirection.SENDONLY.equals(direction))
if ((processor != null)
&& (processor.getState() == Processor.Started))
                processor.stop();
}
/**
* Gets the <tt>MediaFormat</tt>s supported by a <tt>CaptureDevice</tt>
* judging by its <tt>CaptureDeviceInfo</tt>.
* Waits for the specified JMF <tt>Processor</tt> to enter the specified
* <tt>state</tt> and returns <tt>true</tt> if <tt>processor</tt> has
     * successfully entered <tt>state</tt> or <tt>false</tt> if <tt>processor</tt>
* has failed to enter <tt>state</tt>.
*
* @param captureDeviceInfo the <tt>CaptureDeviceInfo</tt> to retrieve the
* supported <tt>MediaFormat</tt>s of
* @return the <tt>MediaFormat</tt>s supported by the specified
* <tt>CaptureDeviceInfo</tt>
* @param processor the JMF <tt>Processor</tt> to wait on
* @param state the state as defined by the respective <tt>Processor</tt>
* state constants to wait <tt>processor</tt> to enter
* @return <tt>true</tt> if <tt>processor</tt> has successfully entered
* <tt>state</tt>; otherwise, <tt>false</tt>
*/
private List<MediaFormat> getSupportedFormats(
CaptureDeviceInfo captureDeviceInfo)
private static boolean waitForState(Processor processor, int state)
{
Format[] supportedFormats = captureDeviceInfo.getFormats();
MediaType mediaType = getMediaType();
List<MediaFormat> supportedMediaFormats
= new ArrayList<MediaFormat>(supportedFormats.length);
for (Format format : supportedFormats)
{
MediaFormat mediaFormat = MediaFormatImpl.createInstance(format);
if ((mediaFormat != null)
&& mediaFormat.getMediaType().equals(mediaType))
supportedMediaFormats.add(mediaFormat);
}
return supportedMediaFormats;
return new ProcessorUtility().waitForState(processor, state);
}
}
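
For reference, an illustrative sketch (not part of this commit; exception handling omitted) of the JMF Processor life cycle that getProcessor(), setFormat() and waitForState() rely on.

    Processor processor = Manager.createProcessor(dataSource); // Unrealized
    processor.configure();   // -> Configuring -> Configured
    // waitForState(processor, Processor.Configured) blocks until this point.
    processor.setContentDescriptor(
        new ContentDescriptor(ContentDescriptor.RAW_RTP));
    processor.realize();     // -> Realizing -> Realized
    processor.start();       // -> Started; getDataOutput() now provides RAW_RTP output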

@ -0,0 +1,773 @@
/*
* SIP Communicator, the OpenSource Java VoIP and Instant Messaging client.
*
* Distributable under LGPL license.
* See terms of license at gnu.org.
*/
package net.java.sip.communicator.impl.neomedia.device;
import java.util.*;
import javax.media.*;
import javax.media.format.*;
import net.java.sip.communicator.impl.neomedia.*;
import net.java.sip.communicator.service.configuration.*;
import net.java.sip.communicator.util.*;
/**
* This class aims to provide a simple configuration interface for JMF. It
* retrieves stored configuration when started or listens to ConfigurationEvent
* for property changes and configures the JMF accordingly.
*
* @author Martin Andre
* @author Emil Ivov
* @author Lubomir Marinov
*/
@SuppressWarnings("unchecked")
public class DeviceConfiguration
extends PropertyChangeNotifier
{
/**
* The name of the <code>DeviceConfiguration</code> property which
* represents the device used by <code>DeviceConfiguration</code> for audio
* capture.
*/
public static final String AUDIO_CAPTURE_DEVICE = "AUDIO_CAPTURE_DEVICE";
/**
* The name of the <code>DeviceConfiguration</code> property which
* represents the device used by <code>DeviceConfiguration</code> for audio
* playback.
*/
public static final String AUDIO_PLAYBACK_DEVICE = "AUDIO_PLAYBACK_DEVICE";
/**
* The name of the <code>DeviceConfiguration</code> property which
* represents the device used by <code>DeviceConfiguration</code> for audio
* notify.
*/
public static final String AUDIO_NOTIFY_DEVICE = "AUDIO_NOTIFY_DEVICE";
/**
* The name of the <code>DeviceConfiguration</code> property which
* represents the device used by <code>DeviceConfiguration</code> for video
* capture.
*/
public static final String VIDEO_CAPTURE_DEVICE = "VIDEO_CAPTURE_DEVICE";
/**
     * When audio is disabled, the selected audio system has the name None.
*/
public static final String AUDIO_SYSTEM_NONE = "None";
/**
* JavaSound sound system.
*/
public static final String AUDIO_SYSTEM_JAVASOUND = "JavaSound";
/**
* PortAudio sound system.
*/
public static final String AUDIO_SYSTEM_PORTAUDIO = "PortAudio";
private static final String PROP_AUDIO_DEVICE =
"net.java.sip.communicator.impl.media.audiodev";
private static final String PROP_AUDIO_PLAYBACK_DEVICE =
"net.java.sip.communicator.impl.media.audio.playbackdev";
private static final String PROP_AUDIO_NOTIFY_DEVICE =
"net.java.sip.communicator.impl.media.audio.notifydev";
private static final String PROP_AUDIO_DEVICE_IS_DISABLED =
"net.java.sip.communicator.impl.media.audiodevIsDisabled";
private static final String PROP_VIDEO_DEVICE =
"net.java.sip.communicator.impl.media.videodev";
private static final String PROP_VIDEO_DEVICE_IS_DISABLED =
"net.java.sip.communicator.impl.media.videodevIsDisabled";
private static final CaptureDeviceInfo[] NO_CAPTURE_DEVICES =
new CaptureDeviceInfo[0];
private Logger logger = Logger.getLogger(DeviceConfiguration.class);
/**
* The device that we'll be using for audio capture.
*/
private CaptureDeviceInfo audioCaptureDevice = null;
private CaptureDeviceInfo audioPlaybackDevice = null;
private CaptureDeviceInfo audioNotifyDevice = null;
/**
* The device that we'll be using for video capture.
*/
private CaptureDeviceInfo videoCaptureDevice;
private static Vector<String> audioSystems = new Vector<String>();
private String audioSystem = null;
/**
* Default constructor.
*/
public DeviceConfiguration()
{
//dummy ... XXX do we really need it though?
}
/**
* Initializes capture devices.
*/
public void initialize()
{
// these seem to be throwing exceptions every now and then so we'll
// blindly catch them for now
try
{
JmfDeviceDetector.detectAndConfigureCaptureDevices();
extractConfiguredCaptureDevices();
}
catch (Exception ex)
{
logger.error("Failed to initialize media.", ex);
}
}
/**
     * Detects capture devices configured through JMF and disables audio and/or
     * video transmission if none were found. Stores the devices that were found
     * in audioCaptureDevice and videoCaptureDevice.
*/
private void extractConfiguredCaptureDevices()
{
ConfigurationService config
= NeomediaActivator.getConfigurationService();
logger.info("Scanning for configured Audio Devices.");
CaptureDeviceInfo[] audioCaptureDevices =
getAvailableAudioCaptureDevices();
if (config.getBoolean(PROP_AUDIO_DEVICE_IS_DISABLED, false))
{
audioCaptureDevice = null;
audioSystem = AUDIO_SYSTEM_NONE;
}
else if (audioCaptureDevices.length < 1)
{
logger.warn("No Audio Device was found.");
audioCaptureDevice = null;
audioSystem = AUDIO_SYSTEM_NONE;
}
else
{
logger.debug("Found " + audioCaptureDevices.length
+ " capture devices: " + audioCaptureDevices);
String audioDevName = config.getString(PROP_AUDIO_DEVICE);
if(audioDevName == null)
{
// the default behaviour if nothing set is to use javasound
// this will also choose the capture device
setAudioSystem(AUDIO_SYSTEM_JAVASOUND, null);
}
else
{
for (CaptureDeviceInfo captureDeviceInfo : audioCaptureDevices)
{
if (audioDevName.equals(captureDeviceInfo.getName()))
{
setAudioSystem(getAudioSystem(captureDeviceInfo),
captureDeviceInfo);
break;
}
}
if(getAudioSystem() == null)
{
logger.warn("Computer sound config changed or " +
"there is a problem since last config was saved, " +
"will back to default javasound");
setAudioPlaybackDevice(null);
setAudioNotifyDevice(null);
setAudioCaptureDevice(null);
setAudioSystem(AUDIO_SYSTEM_JAVASOUND, null);
}
}
if (audioCaptureDevice != null)
logger.info("Found " + audioCaptureDevice.getName()
+ " as an audio capture device.");
}
if (config.getBoolean(PROP_VIDEO_DEVICE_IS_DISABLED, false))
videoCaptureDevice = null;
else
{
logger.info("Scanning for configured Video Devices.");
videoCaptureDevice =
extractConfiguredVideoCaptureDevice(VideoFormat.RGB);
// no RGB camera found. And what about YUV ?
if (videoCaptureDevice == null)
{
videoCaptureDevice =
extractConfiguredVideoCaptureDevice(VideoFormat.YUV);
if (videoCaptureDevice == null)
logger.info("No Video Device was found.");
}
}
}
private CaptureDeviceInfo extractConfiguredVideoCaptureDevice(String format)
{
List<CaptureDeviceInfo> videoCaptureDevices =
CaptureDeviceManager.getDeviceList(new VideoFormat(format));
CaptureDeviceInfo videoCaptureDevice = null;
if (videoCaptureDevices.size() > 0)
{
String videoDevName
= NeomediaActivator
.getConfigurationService().getString(PROP_VIDEO_DEVICE);
if (videoDevName == null)
videoCaptureDevice = videoCaptureDevices.get(0);
else
{
for (CaptureDeviceInfo captureDeviceInfo : videoCaptureDevices)
{
if (videoDevName.equals(captureDeviceInfo.getName()))
{
videoCaptureDevice = captureDeviceInfo;
break;
}
}
}
            if (videoCaptureDevice != null)
                logger.info("Found " + videoCaptureDevice.getName()
                    + " as a video capture device.");
}
return videoCaptureDevice;
}
/**
* Returns a device that we could use for audio capture.
*
* @return the CaptureDeviceInfo of a device that we could use for audio
* capture.
*/
public CaptureDeviceInfo getAudioCaptureDevice()
{
return audioCaptureDevice;
}
/**
* Gets the list of audio capture devices which are available through this
* <code>DeviceConfiguration</code>, amongst which is
* {@link #getAudioCaptureDevice()} and represent acceptable values
* for {@link #setAudioCaptureDevice(CaptureDeviceInfo)}
*
* @return an array of <code>CaptureDeviceInfo</code> describing the audio
* capture devices available through this
* <code>DeviceConfiguration</code>
*/
public CaptureDeviceInfo[] getAvailableAudioCaptureDevices()
{
Vector<CaptureDeviceInfo> audioCaptureDevices =
CaptureDeviceManager.getDeviceList(new AudioFormat(
AudioFormat.LINEAR, 44100, 16, 1));// 1 means 1 channel for mono
return audioCaptureDevices.toArray(NO_CAPTURE_DEVICES);
}
/**
* Gets the list of audio capture devices which are available through this
* <code>DeviceConfiguration</code>, amongst which is
* {@link #getAudioCaptureDevice()} and represent acceptable values
* for {@link #setAudioCaptureDevice(CaptureDeviceInfo)}
*
* @param soundSystem
* filter capture devices only from the supplied audio system.
*
* @return an array of <code>CaptureDeviceInfo</code> describing the audio
* capture devices available through this
* <code>DeviceConfiguration</code>
*/
public CaptureDeviceInfo[] getAvailableAudioCaptureDevices(String soundSystem)
{
String protocol = null;
if(soundSystem.equals(AUDIO_SYSTEM_JAVASOUND))
protocol = "javasound";
else if(soundSystem.equals(AUDIO_SYSTEM_PORTAUDIO))
protocol = "portaudio";
Vector<CaptureDeviceInfo> res = new Vector<CaptureDeviceInfo>();
if(protocol != null)
{
CaptureDeviceInfo[] all = getAvailableAudioCaptureDevices();
for(int i = 0; i < all.length; i++)
{
CaptureDeviceInfo cDeviceInfo = all[i];
if(cDeviceInfo.getLocator().getProtocol().equals(protocol))
{
res.add(cDeviceInfo);
}
}
}
return res.toArray(NO_CAPTURE_DEVICES);
}
/**
     * Lists all the playback devices. These are only portaudio devices, as we
     * can only set a particular device for playback when using portaudio.
*
* @return the devices that can be used for playback.
*/
public CaptureDeviceInfo[] getAvailableAudioPlaybackDevices()
{
return PortAudioAuto.playbackDevices;
}
/**
* Gets the list of video capture devices which are available through this
* <code>DeviceConfiguration</code>, amongst which is
* {@link #getVideoCaptureDevice()} and represent acceptable values
* for {@link #setVideoCaptureDevice(CaptureDeviceInfo)}
*
* @return an array of <code>CaptureDeviceInfo</code> describing the video
* capture devices available through this
* <code>DeviceConfiguration</code>
*/
public CaptureDeviceInfo[] getAvailableVideoCaptureDevices()
{
Set<CaptureDeviceInfo> videoCaptureDevices =
new HashSet<CaptureDeviceInfo>();
videoCaptureDevices.addAll(CaptureDeviceManager
.getDeviceList(new VideoFormat(VideoFormat.RGB)));
videoCaptureDevices.addAll(CaptureDeviceManager
.getDeviceList(new VideoFormat(VideoFormat.YUV)));
return videoCaptureDevices.toArray(NO_CAPTURE_DEVICES);
}
/**
* Returns a device that we could use for video capture.
*
* @return the CaptureDeviceInfo of a device that we could use for video
* capture.
*/
public CaptureDeviceInfo getVideoCaptureDevice()
{
return videoCaptureDevice;
}
/**
* Sets the device which is to be used by this
* <code>DeviceConfiguration</code> for video capture.
*
* @param device a <code>CaptureDeviceInfo</code> describing device to be
* used by this <code>DeviceConfiguration</code> for video
* capture
*/
public void setVideoCaptureDevice(CaptureDeviceInfo device)
{
if (videoCaptureDevice != device)
{
CaptureDeviceInfo oldDevice = videoCaptureDevice;
videoCaptureDevice = device;
ConfigurationService config
= NeomediaActivator.getConfigurationService();
config.setProperty(PROP_VIDEO_DEVICE_IS_DISABLED,
videoCaptureDevice == null);
if (videoCaptureDevice != null)
config.setProperty(PROP_VIDEO_DEVICE, videoCaptureDevice
.getName());
firePropertyChange(VIDEO_CAPTURE_DEVICE, oldDevice, device);
}
}
/**
* Sets the device which is to be used by this
* <code>DeviceConfiguration</code> for audio capture.
*
* @param device a <code>CaptureDeviceInfo</code> describing the device to
* be used by this <code>DeviceConfiguration</code> for audio
* capture
*/
public void setAudioCaptureDevice(CaptureDeviceInfo device)
{
if (audioCaptureDevice != device)
{
CaptureDeviceInfo oldDevice = audioCaptureDevice;
audioCaptureDevice = device;
ConfigurationService config
= NeomediaActivator.getConfigurationService();
if (audioCaptureDevice != null)
{
config.setProperty(PROP_AUDIO_DEVICE, audioCaptureDevice
.getName());
}
else
config.setProperty(PROP_AUDIO_DEVICE, null);
firePropertyChange(AUDIO_CAPTURE_DEVICE, oldDevice, device);
}
}
/**
     * Determines whether audio capture is supported, i.e. whether an audio
     * capture device is currently selected.
*
* @return true if audio capture is supported and false otherwise.
*/
public boolean isAudioCaptureSupported()
{
return this.audioCaptureDevice != null;
}
/**
     * Determines whether video capture is supported, i.e. whether a video
     * capture device is currently selected.
     *
     * @return true if video capture is supported and false otherwise.
*/
public boolean isVideoCaptureSupported()
{
return this.videoCaptureDevice != null;
}
/**
* Returns the installed audio systems.
* @return the names of the installed audio systems.
*/
public String[] getAvailableAudioSystems()
{
return audioSystems.toArray(new String[0]);
}
/**
* Adds audio system.
* @param audioSystemName the name of the audio system.
*/
public static void addAudioSystem(String audioSystemName)
{
audioSystems.add(audioSystemName);
}
/**
* Returns the currently selected audio system.
* @return the name of the currently selected audio system.
*/
public String getAudioSystem()
{
return audioSystem;
}
private String getAudioSystem(CaptureDeviceInfo cdi)
{
String res = null;
// Here we iterate over the available audio systems
// to be sure that the audio system
// is available and enabled on the system we are running on
if(cdi.getLocator().getProtocol().equals("javasound"))
{
Iterator<String> iter = audioSystems.iterator();
while (iter.hasNext())
{
String asName = iter.next();
if(asName.equals(AUDIO_SYSTEM_JAVASOUND))
res = asName;
}
}
else if(cdi.getLocator().getProtocol().equals("portaudio"))
{
Iterator<String> iter = audioSystems.iterator();
while (iter.hasNext())
{
String asName = iter.next();
if(asName.equals(AUDIO_SYSTEM_PORTAUDIO))
res = asName;
}
}
if(res == null)
res = AUDIO_SYSTEM_NONE;
return res;
}
/**
* Changes the current audio system.
* When javasound is selected we also change the capture device.
*
* @param name the name of the audio system.
* @param captureDevice the selected capture device; if it is null we will
*          choose a default one. This parameter is used the first time we
*          initialize and extract the stored configuration.
*/
public void setAudioSystem(String name, CaptureDeviceInfo captureDevice)
{
ConfigurationService config
= NeomediaActivator.getConfigurationService();
audioSystem = name;
if(name.equals(AUDIO_SYSTEM_NONE))
{
setAudioCaptureDevice(null);
setAudioNotifyDevice(null);
setAudioPlaybackDevice(null);
}
else if(name.equals(AUDIO_SYSTEM_JAVASOUND))
{
setAudioNotifyDevice(null);
setAudioPlaybackDevice(null);
// as there is only one device for JavaSound,
// let's search for it
if(captureDevice != null)
setAudioCaptureDevice(captureDevice);
else
{
CaptureDeviceInfo[] audioCaptureDevices =
getAvailableAudioCaptureDevices();
for (CaptureDeviceInfo captureDeviceInfo : audioCaptureDevices)
{
if(captureDeviceInfo.getLocator().getProtocol().
equals("javasound"))
{
setAudioCaptureDevice(captureDeviceInfo);
break;
}
}
}
// if we have initialized the audio capture device, it means JavaSound is
// available and everything is OK
if (audioCaptureDevice != null)
{
removePortAudioRenderer();
initJavaSoundRenderer();
}
}
else if(name.equals(AUDIO_SYSTEM_PORTAUDIO))
{
// changed to PortAudio, so let's clear the current device selection
// because the devices must be selected explicitly;
// if this is the first-time call, the devices will already be null
// and nothing will happen
setAudioCaptureDevice(null);
setAudioNotifyDevice(null);
setAudioPlaybackDevice(null);
// we don't save anything here because it will be saved
// when the devices themselves are stored;
// if nothing is set we consider the system not configured,
// so after a restart we will end up with the default config,
// while until the restart we keep using the latest config
// the capture device is not null when we are called for the
// first time and in that case we also extract the playback devices here
if(captureDevice != null)
{
setAudioCaptureDevice(captureDevice);
String audioDevName = config.getString(PROP_AUDIO_NOTIFY_DEVICE);
if(audioDevName != null)
{
for (CaptureDeviceInfo captureDeviceInfo :
PortAudioAuto.playbackDevices)
{
if (audioDevName.equals(captureDeviceInfo.getName()))
{
this.audioNotifyDevice = captureDeviceInfo;
break;
}
}
}
audioDevName = config.getString(PROP_AUDIO_PLAYBACK_DEVICE);
if(audioDevName != null)
{
for (CaptureDeviceInfo captureDeviceInfo :
PortAudioAuto.playbackDevices)
{
if (audioDevName.equals(captureDeviceInfo.getName()))
{
this.audioPlaybackDevice = captureDeviceInfo;
setDeviceToRenderer(audioPlaybackDevice);
removeJavaSoundRenderer();
break;
}
}
}
}
// return here to prevent clearing the last config that was saved
return;
}
else
{
// not expected behaviour
logger.error("Unknown audio system! Name:" + name);
audioSystem = null;
}
config.setProperty(PROP_AUDIO_DEVICE_IS_DISABLED,
audioCaptureDevice == null);
}
/**
* Installs the PortAudio Renderer
*/
protected static void initPortAudioRenderer()
{
PlugInManager.addPlugIn(
"net.java.sip.communicator.impl.media.renderer.audio.PortAudioRenderer",
net.java.sip.communicator.impl.media.renderer.audio.
PortAudioRenderer.supportedInputFormats,
null,
PlugInManager.RENDERER);
}
private void removeJavaSoundRenderer()
{
PlugInManager.removePlugIn(
"com.sun.media.renderer.audio.JavaSoundRenderer",
PlugInManager.RENDERER);
}
private void removePortAudioRenderer()
{
PlugInManager.removePlugIn(
"net.java.sip.communicator.impl.media.renderer.audio.PortAudioRenderer",
PlugInManager.RENDERER);
}
private void initJavaSoundRenderer()
{
try
{
PlugInManager.addPlugIn(
"com.sun.media.renderer.audio.JavaSoundRenderer",
new com.sun.media.renderer.audio.JavaSoundRenderer()
.getSupportedInputFormats(),
null,
PlugInManager.RENDERER);
}
catch (Exception e)
{
// if the class is missing
logger.error("Problem initializing the JavaSound renderer", e);
}
}
private void setDeviceToRenderer(CaptureDeviceInfo devInfo)
{
// no need to set the device on the renderer, it will not be used anyway
if(devInfo == null)
return;
try
{
net.java.sip.communicator.impl.media.renderer.audio.
PortAudioRenderer.setDevice(devInfo.getLocator());
}
catch (Exception e)
{
logger.error("error setting device to renderer", e);
}
}
/**
* @return the audioPlaybackDevice
*/
public CaptureDeviceInfo getAudioPlaybackDevice()
{
return audioPlaybackDevice;
}
/**
* @return the audioNotifyDevice
*/
public CaptureDeviceInfo getAudioNotifyDevice()
{
return audioNotifyDevice;
}
/**
* @param audioPlaybackDevice the audioPlaybackDevice to set
*/
public void setAudioPlaybackDevice(CaptureDeviceInfo audioPlaybackDevice)
{
if(this.audioPlaybackDevice != audioPlaybackDevice)
{
CaptureDeviceInfo oldDev = this.audioPlaybackDevice;
this.audioPlaybackDevice = audioPlaybackDevice;
setDeviceToRenderer(audioPlaybackDevice);
// we changed the playback device, so we are using PortAudio;
// remove the JavaSound renderer to be sure it is not used
// anymore and install the PortAudio one
removeJavaSoundRenderer();
initPortAudioRenderer();
ConfigurationService config
= NeomediaActivator.getConfigurationService();
if (audioPlaybackDevice != null)
{
config.setProperty(PROP_AUDIO_PLAYBACK_DEVICE,
audioPlaybackDevice.getName());
config.setProperty(PROP_AUDIO_DEVICE_IS_DISABLED, false);
}
else
config.setProperty(PROP_AUDIO_PLAYBACK_DEVICE, null);
firePropertyChange(AUDIO_PLAYBACK_DEVICE,
oldDev, audioPlaybackDevice);
}
}
/**
* @param audioNotifyDevice the audioNotifyDevice to set
*/
public void setAudioNotifyDevice(CaptureDeviceInfo audioNotifyDevice)
{
if(this.audioNotifyDevice != audioNotifyDevice)
{
CaptureDeviceInfo oldDev = this.audioNotifyDevice;
this.audioNotifyDevice = audioNotifyDevice;
ConfigurationService config
= NeomediaActivator.getConfigurationService();
if (audioNotifyDevice != null)
{
config.setProperty(PROP_AUDIO_NOTIFY_DEVICE,
audioNotifyDevice.getName());
// at least the notify or the playback device must be set
// in order to consider PortAudio enabled
config.setProperty(PROP_AUDIO_DEVICE_IS_DISABLED, false);
}
else
config.setProperty(PROP_AUDIO_NOTIFY_DEVICE, null);
firePropertyChange(AUDIO_NOTIFY_DEVICE,
oldDev, audioNotifyDevice);
}
}
}
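
A minimal usage sketch of the DeviceConfiguration API above, assuming a DeviceConfiguration instance is already available (for example from the media service implementation) and that the constants and getters visible in this file are publicly accessible; the sketch class name is hypothetical:

import javax.media.CaptureDeviceInfo;
import net.java.sip.communicator.impl.neomedia.device.DeviceConfiguration;

public class AudioSystemSwitchSketch
{
    public static void selectPortAudio(DeviceConfiguration deviceConfig)
    {
        // Switch to PortAudio; passing null clears the current device
        // selection so that the devices can be chosen explicitly afterwards
        // (see the comments in setAudioSystem above).
        deviceConfig.setAudioSystem(
            DeviceConfiguration.AUDIO_SYSTEM_PORTAUDIO, null);

        // Pick the first available playback device, if any; the capture and
        // notify devices would be chosen analogously via
        // setAudioCaptureDevice and setAudioNotifyDevice.
        CaptureDeviceInfo[] playbackDevices
            = deviceConfig.getAvailableAudioPlaybackDevices();
        if (playbackDevices.length > 0)
            deviceConfig.setAudioPlaybackDevice(playbackDevices[0]);
    }
}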

@ -0,0 +1,35 @@
/*
* SIP Communicator, the OpenSource Java VoIP and Instant Messaging client.
*
* Distributable under LGPL license.
* See terms of license at gnu.org.
*/
package net.java.sip.communicator.impl.neomedia.device;
import java.lang.reflect.*;
/**
* FMJ auto-detect of CIVIL video capture devices.
*
* @author Ken Larson
*/
public class FMJCivilVideoAuto
{
/**
* Creates an instance of FMJCivilVideoAuto and auto-detects CIVIL video
* capture devices.
*
* @throws java.lang.Exception if FMJ is not present in the classpath or if
* detection fails for some other reason.
*/
public FMJCivilVideoAuto() throws Exception
{
// Done using reflection to avoid compile-time dependency on FMJ:
//new net.sf.fmj.media.cdp.civil.CaptureDevicePlugger().addCaptureDevices();
final Class<?> clazz
= Class.forName("net.sf.fmj.media.cdp.civil.CaptureDevicePlugger");
final Method addCaptureDevices = clazz.getMethod("addCaptureDevices");
final Object captureDevicePlugger = clazz.newInstance();
addCaptureDevices.invoke(captureDevicePlugger);
}
}

@ -0,0 +1,36 @@
/*
* SIP Communicator, the OpenSource Java VoIP and Instant Messaging client.
*
* Distributable under LGPL license.
* See terms of license at gnu.org.
*/
package net.java.sip.communicator.impl.neomedia.device;
import java.lang.reflect.Method;
/**
* Auto-detection of FMJ audio capture devices.
*
* @author Ken Larson
*/
public class FMJJavaSoundAuto
{
/**
* Creates an instance of FMJJavaSoundAuto and auto-detects FMJ audio
* capture devices.
*
* @throws java.lang.Exception if FMJ is not present in the classpath or if
* detection fails for some other reason.
*/
public FMJJavaSoundAuto() throws Exception
{
// Done using reflection to avoid compile-time dependency on FMJ:
//new net.sf.fmj.media.cdp.javasound.CaptureDevicePlugger()
//.addCaptureDevices();
final Class<?> clazz = Class.forName(
"net.sf.fmj.media.cdp.javasound.CaptureDevicePlugger");
final Method addCaptureDevices = clazz.getMethod("addCaptureDevices");
final Object captureDevicePlugger = clazz.newInstance();
addCaptureDevices.invoke(captureDevicePlugger);
}
}

@ -0,0 +1,87 @@
/*
* SIP Communicator, the OpenSource Java VoIP and Instant Messaging client.
*
* Distributable under LGPL license.
* See terms of license at gnu.org.
*
* File based on:
* @(#)JavaSoundAuto.java 1.2 01/03/13
* Copyright (c) 1999-2001 Sun Microsystems, Inc. All Rights Reserved.
*/
package net.java.sip.communicator.impl.neomedia.device;
import java.util.*;
import javax.media.*;
import net.java.sip.communicator.util.*;
/**
* Detects javasound and registers capture devices.
* @author damencho
*/
public class JavaSoundAuto {
private static final Logger logger = Logger.getLogger(JavaSoundAuto.class);
private static final String detectClass =
"net.java.sip.communicator.impl.neomedia.device.JavaSoundDetector";
CaptureDeviceInfo[] devices = null;
public static void main(String[] args) {
new JavaSoundAuto();
System.exit(0);
}
@SuppressWarnings("unchecked") //legacy JMF code.
public JavaSoundAuto() {
boolean supported = false;
// instantiate JavaSoundDetector to check whether JavaSound capture is available
try {
Class<?> cls = Class.forName(detectClass);
JavaSoundDetector detect = (JavaSoundDetector)cls.newInstance();
supported = detect.isSupported();
} catch (Throwable thr) {
supported = false;
logger.error("Failed detecting java sound audio", thr);
}
logger.info("JavaSound Capture Supported = " + supported);
if (supported) {
// It's there, start to register JavaSound with CaptureDeviceManager
Vector<CaptureDeviceInfo> devices
= (Vector) CaptureDeviceManager.getDeviceList(null).clone();
// remove the old javasound capturers
String name;
Enumeration<CaptureDeviceInfo> enumeration = devices.elements();
while (enumeration.hasMoreElements()) {
CaptureDeviceInfo cdi = enumeration.nextElement();
name = cdi.getName();
if (name.startsWith("JavaSound"))
CaptureDeviceManager.removeDevice(cdi);
}
// collect javasound capture device info from JavaSoundSourceStream
// and register them with CaptureDeviceManager
CaptureDeviceInfo[] cdi
= com.sun.media.protocol.javasound.JavaSoundSourceStream
.listCaptureDeviceInfo();
if (cdi != null) {
for (int i = 0; i < cdi.length; i++)
CaptureDeviceManager.addDevice(cdi[i]);
try {
CaptureDeviceManager.commit();
logger.info("JavaSoundAuto: Committed ok");
} catch (java.io.IOException ioe) {
logger.error("JavaSoundAuto: error committing cdm");
}
}
// now add it as available audio system to DeviceConfiguration
DeviceConfiguration.addAudioSystem(
DeviceConfiguration.AUDIO_SYSTEM_JAVASOUND);
}
}
}
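
A small sketch of how the devices registered by JavaSoundAuto could then be queried through JMF's CaptureDeviceManager. The audio format filter and the sketch class name are only illustrative, and JMF is assumed to be on the classpath:

import java.util.Vector;
import javax.media.CaptureDeviceInfo;
import javax.media.CaptureDeviceManager;
import javax.media.format.AudioFormat;
import net.java.sip.communicator.impl.neomedia.device.JavaSoundAuto;

public class ListAudioCaptureDevicesSketch
{
    public static void main(String[] args)
    {
        // Trigger the detection and registration performed by JavaSoundAuto.
        new JavaSoundAuto();

        // Query JMF's CaptureDeviceManager for linear audio capture devices.
        @SuppressWarnings("unchecked")
        Vector<CaptureDeviceInfo> devices
            = CaptureDeviceManager.getDeviceList(
                    new AudioFormat(AudioFormat.LINEAR));
        for (CaptureDeviceInfo device : devices)
            System.out.println(device.getName() + " -> " + device.getLocator());
    }
}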

@ -0,0 +1,33 @@
/*
* SIP Communicator, the OpenSource Java VoIP and Instant Messaging client.
*
* Distributable under LGPL license.
* See terms of license at gnu.org.
*
* File based on:
* @(#)JavaSoundDetector.java 1.2 01/03/13
* Copyright (c) 1999-2001 Sun Microsystems, Inc. All Rights Reserved.
*/
package net.java.sip.communicator.impl.neomedia.device;
import javax.sound.sampled.*;
public class JavaSoundDetector {
boolean supported = false;
public JavaSoundDetector() {
try {
DataLine.Info info = new DataLine.Info(TargetDataLine.class,
null,
AudioSystem.NOT_SPECIFIED);
supported = AudioSystem.isLineSupported(info);
} catch (Exception ex) {
supported = false;
}
}
public boolean isSupported() {
return supported;
}
}

@ -0,0 +1,457 @@
/*
* SIP Communicator, the OpenSource Java VoIP and Instant Messaging client.
*
* Distributable under LGPL license.
* See terms of license at gnu.org.
*
* File based on:
* @(#)JMFInit.java 1.14 03/04/30
* Copyright (c) 1996-2002 Sun Microsystems, Inc. All rights reserved.
*/
package net.java.sip.communicator.impl.neomedia.device;
import java.io.*;
import java.util.*;
import javax.media.*;
import javax.media.format.*;
import net.java.sip.communicator.impl.neomedia.*;
import net.java.sip.communicator.service.fileaccess.*;
import net.java.sip.communicator.util.*;
import com.sun.media.util.*;
/**
* Probes for available capture and playback devices and initializes the
* jmf.properties accordingly.
*
* @author Emil Ivov
* @author Ken Larson
* @author Lubomir Marinov
*/
public class JmfDeviceDetector
{
private static final Logger logger
= Logger.getLogger(JmfDeviceDetector.class);
/**
* The JMF property that specifies whether we'd have the right to capture
* when run from webstart or an applet.
*/
private static final String PROP_ALLOW_CAPTURE_FROM_APPLETS
= "secure.allowCaptureFromApplets";
/**
* The JMF property that specifies whether we'd have the right to save
* files when run from webstart or an applet.
*/
private static final String PROP_ALLOW_SAVE_FILE_FROM_APPLETS
= "secure.allowSaveFileFromApplets";
/**
* The JMF registry property that specifies that we have initialized the
* currently valid repository.
*/
private static final String PROP_REGISTRY_AUTHOR
= "registry.author";
/**
* The value of the JMF registry property that determines whether we have
* initialized the currently valid repository.
*/
private static final String PROP_REGISTRY_AUTHOR_VALUE
= "sip-communicator.org";
/**
* The name of the file that the JMF registry uses for storing and loading
* jmf properties.
*/
private static final String JMF_PROPERTIES_FILE_NAME = "jmf.properties";
/**
* Default constructor - does nothing.
*/
public JmfDeviceDetector()
{
}
/**
* Detect all capture devices
*/
private void initialize()
{
if (FMJConditionals.USE_JMF_INTERNAL_REGISTRY)
{
// This uses JMF internals:
// see if the registry has already been "tagged" by us, skip auto-detection if
// it has.
// This was probably done because JMF auto-detection is very slow, especially
// for video devices. FMJ does this quickly, so there is no need for this
// kind of workaround (besides the fact that these internal functions are not
// implemented in FMJ).
String author = (String)Registry.get(PROP_REGISTRY_AUTHOR);
if(author != null)
{
return;
}
Registry.set(PROP_ALLOW_CAPTURE_FROM_APPLETS, new Boolean(true));
Registry.set(PROP_ALLOW_SAVE_FILE_FROM_APPLETS, new Boolean(true));
Registry.set(PROP_REGISTRY_AUTHOR, PROP_REGISTRY_AUTHOR_VALUE);
try
{
Registry.commit();
}
catch (Exception exc)
{
logger.error(
"Failed to initially commit JMFRegistry. Ignoring err."
, exc);
}
}
// Issues #693 and #524:
// Disable DirectSound since it fails on multiple installations
//
// detectDirectAudio();
// detectS8DirectAudio();
detectCaptureDevices();
}
/**
* Detect all existing capture devices and record them into the JMF
* repository.
*/
private void detectCaptureDevices()
{
logger.info("Looking for Audio capturer");
// check if JavaSound capture is available
try
{
new JavaSoundAuto();
}
catch (Throwable exc)
{
logger.debug("No JMF javasound detected: " + exc.getMessage());
}
// check if we have FMJJavaSoundAuto capture is available
try
{
new FMJJavaSoundAuto();
}
catch (Throwable exc)
{
logger.debug("No FMJ javasound detected: " + exc.getMessage());
}
try
{
new PortAudioAuto();
}
catch (Throwable exc)
{
logger.info("No portaudio detected: " + exc.getMessage());
}
// after JavaSound and PortAudio have eventually added themselves to the
// available audio systems, let's add the option None in order to be able
// to disable audio
DeviceConfiguration.addAudioSystem(
DeviceConfiguration.AUDIO_SYSTEM_NONE);
// video is enabled by default
// if video is disabled skip device detection
if (NeomediaActivator
.getConfigurationService()
.getBoolean(
MediaServiceImpl.DISABLE_VIDEO_SUPPORT_PROPERTY_NAME,
false))
return;
// Try to configure capture devices for any operating system.
// Those that do not apply will silently fail.
logger.info("Looking for video capture devices");
/* int nDevices = 0;
//Windows
try
{
VFWAuto vfwAuto = new VFWAuto();
vfwAuto.autoDetectDevices();
logger.info("Detected "
+ nDevices
+" VFW video capture device(s).");
}
catch (Throwable exc)
{
logger.debug("No VFW video detected: " + exc.getMessage());
}
//SunVideo
try
{
SunVideoAuto sunVideoAuto = new SunVideoAuto();
nDevices = sunVideoAuto.autoDetectDevices();
logger.info("Detected "
+ nDevices
+" SUN Video capture device(s).");
}
catch (Throwable exc)
{
logger.debug("No SUN Video detected: " + exc.getMessage());
}
//SunVideoPlus
try
{
SunVideoPlusAuto sunVideoAutoPlus = new SunVideoPlusAuto();
nDevices = sunVideoAutoPlus.autoDetectDevices();
logger.info("Detected "
+ nDevices
+ " SUN Video Plus device(s).");
}
catch (Throwable exc)
{
logger.debug("No SUN Video Plus detected: " + exc.getMessage());
}
//Linux
try
{
V4LAuto v4lAuto = new V4LAuto();
nDevices = v4lAuto.autoDetectDevices();
logger.info("Detected "
+ nDevices
+" V4L video capture device.");
}
catch (Throwable exc)
{
logger.debug("No V4l video detected: " + exc.getMessage());
}
*/
//FMJ
try
{
if(isFMJVideoAvailable())
new FMJCivilVideoAuto();
}
catch (Throwable exc)
{
logger.debug("No FMJ CIVIL video detected: " + exc.getMessage(), exc);
}
}
/**
* Currently FMJ video is not supported under Mac OS X with Java 1.6,
* because the Mac OS X video support uses libQTJNative.jnilib which only
* supports Java 1.5 and is deprecated.
* @return whether FMJ video is supported under the current OS and environment.
*/
private boolean isFMJVideoAvailable()
{
String osName = System.getProperty("os.name");
if (osName.startsWith("Mac") &&
System.getProperty("java.version").startsWith("1.6"))
{
return false;
}
return true;
}
/**
* Will try to detect direct audio devices.
*/
@SuppressWarnings("unchecked") //legacy JMF code.
private void detectDirectAudio()
{
Class<?> cls;
int plType = PlugInManager.RENDERER;
String dar = "com.sun.media.renderer.audio.DirectAudioRenderer";
try
{
// Check if this is the Windows Performance Pack - hack
cls = Class.forName(
"net.java.sip.communicator.impl.media.device.VFWAuto");
// Check if DS capture is supported, otherwise fail DS renderer
// since NT doesn't have capture
cls = Class.forName("com.sun.media.protocol.dsound.DSound");
// Find the renderer class and instantiate it.
cls = Class.forName(dar);
Renderer rend = (Renderer) cls.newInstance();
try
{
// Set the format and open the device
AudioFormat af = new AudioFormat(AudioFormat.LINEAR,
44100, 16, 2);
rend.setInputFormat(af);
rend.open();
Format[] inputFormats = rend.getSupportedInputFormats();
// Register the device
PlugInManager.addPlugIn(dar, inputFormats, new Format[0],
plType);
// Move it to the top of the list
Vector<String> rendList =
PlugInManager.getPlugInList(null, null, plType);
int listSize = rendList.size();
if (rendList.elementAt(listSize - 1).equals(dar))
{
rendList.removeElementAt(listSize - 1);
rendList.insertElementAt(dar, 0);
PlugInManager.setPlugInList(rendList, plType);
PlugInManager.commit();
//System.err.println("registered");
}
rend.close();
}
catch (Throwable throwable)
{
logger.debug("Detection for direct audio failed.", throwable);
}
}
catch (Throwable tt)
{
logger.debug("Detection for direct audio failed.", tt);
}
}
/**
* Runs JMFInit the first time the application is started so that capture
* devices are properly detected and initialized by JMF.
*/
public static void setupJMF()
{
logger.logEntry();
try
{
// we'll be storing our jmf.properties file inside the
// sip-communicator directory. If it does not exist - we create it.
// If the jmf.properties file has 0 length then this is the first
// time we're running and we should detect capture devices
File jmfPropsFile = null;
try
{
FileAccessService faService
= NeomediaActivator.getFileAccessService();
if(faService != null)
{
jmfPropsFile = faService.
getPrivatePersistentFile(JMF_PROPERTIES_FILE_NAME);
}
//small hack for when running from outside oscar
else
{
jmfPropsFile
= new File(System.getProperty("user.home")
+ File.separator
+ ".sip-communicator/jmf.properties");
}
//force reinitialization
if(jmfPropsFile.exists())
jmfPropsFile.delete();
jmfPropsFile.createNewFile();
}
catch (Exception exc)
{
throw new RuntimeException(
"Failed to create the jmf.properties file.", exc);
}
String classpath = System.getProperty("java.class.path");
classpath = jmfPropsFile.getParentFile().getAbsolutePath()
+ System.getProperty("path.separator")
+ classpath;
System.setProperty("java.class.path", classpath);
/** @todo run this only if necessary and in parallel. Right now
* we're running detection no matter what. We should be more
* intelligent and detect somehow whether new devices are present
* before we run our detection tests.*/
JmfDeviceDetector detector = new JmfDeviceDetector();
detector.initialize();
}
finally
{
logger.logExit();
}
setupRenderers();
}
@SuppressWarnings("unchecked") //legacy JMF code.
private static void setupRenderers()
{
if (isWindowsVista())
{
/*
* DDRenderer will cause Windows Vista to switch its theme from Aero
* to Vista Basic so try to pick up a different Renderer.
*/
Vector<String> renderers =
PlugInManager.getPlugInList(null, null, PlugInManager.RENDERER);
if (renderers.contains("com.sun.media.renderer.video.GDIRenderer"))
{
PlugInManager.removePlugIn(
"com.sun.media.renderer.video.DDRenderer",
PlugInManager.RENDERER);
}
}
else if(!isLinux32())
{
Vector<String> renderers =
PlugInManager.getPlugInList(null, null, PlugInManager.RENDERER);
if (renderers.contains("com.sun.media.renderer.video.LightWeightRenderer") ||
renderers.contains("com.sun.media.renderer.video.AWTRenderer"))
{
// remove xlib renderer cause its native one and jmf is supported
// only on 32bit machines
PlugInManager.removePlugIn(
"com.sun.media.renderer.video.XLibRenderer",
PlugInManager.RENDERER);
}
}
}
private static boolean isWindowsVista()
{
String osName = System.getProperty("os.name");
/*
* TODO We're currently checking for Vista only but it may make sense to
* check for a version of Windows greater than or equal to Vista.
*/
return (osName != null) && (osName.indexOf("Windows") != -1)
&& (osName.indexOf("Vista") != -1);
}
private static boolean isLinux32()
{
String osName = System.getProperty("os.name");
String arch = System.getProperty("sun.arch.data.model");
return (osName != null) && (arch != null) && (osName.indexOf("Linux") != -1)
&& (arch.indexOf("32") != -1);
}
/**
* Detects all devices and completes the JMF initialization.
*/
public static void detectAndConfigureCaptureDevices()
{
setupJMF();
}
}
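
A brief sketch of how the detection above would typically be triggered. It assumes the code runs inside the neomedia bundle after the configuration and file access services are available, since JmfDeviceDetector obtains them through NeomediaActivator; the sketch class name is hypothetical:

import net.java.sip.communicator.impl.neomedia.device.JmfDeviceDetector;

public class DeviceDetectionSketch
{
    public static void initializeDevices()
    {
        // Probes the capture and playback devices, (re)writes jmf.properties
        // and installs the platform-appropriate renderers.
        JmfDeviceDetector.detectAndConfigureCaptureDevices();
    }
}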

@ -0,0 +1,78 @@
/*
* SIP Communicator, the OpenSource Java VoIP and Instant Messaging client.
*
* Distributable under LGPL license.
* See terms of license at gnu.org.
*/
package net.java.sip.communicator.impl.neomedia.device;
import java.util.*;
import javax.media.*;
import net.java.sip.communicator.impl.media.protocol.portaudio.*;
/**
* Creates PortAudio capture devices by enumerating all the host devices that
* have input channels.
*
* @author Damian Minkov
*/
public class PortAudioAuto
{
/**
* An array of the devices that can be used for playback.
*/
public static CaptureDeviceInfo[] playbackDevices = null;
PortAudioAuto() throws Exception
{
// if PortAudio has a problem initializing, e.g. missing native
// components, it will throw an exception here and PortAudio rendering
// will not be initialized.
PortAudio.initialize();
int deviceCount = PortAudio.Pa_GetDeviceCount();
int deviceIndex = 0;
Vector<CaptureDeviceInfo> playbackDevVector =
new Vector<CaptureDeviceInfo>();
for (; deviceIndex < deviceCount; deviceIndex++)
{
long deviceInfo = PortAudio.Pa_GetDeviceInfo(deviceIndex);
int maxInputChannels =
PortAudio.PaDeviceInfo_getMaxInputChannels(deviceInfo);
int maxOutputChannels =
PortAudio.PaDeviceInfo_getMaxOutputChannels(deviceInfo);
CaptureDeviceInfo jmfInfo =
new CaptureDeviceInfo(
PortAudio.PaDeviceInfo_getName(deviceInfo),
new MediaLocator(
PortAudioStream.LOCATOR_PREFIX + deviceIndex),
PortAudioStream.getFormats());
if(maxInputChannels > 0)
{
CaptureDeviceManager.addDevice(jmfInfo);
}
if(maxOutputChannels > 0)
{
playbackDevVector.add(jmfInfo);
}
}
playbackDevices = playbackDevVector.toArray(new CaptureDeviceInfo[0]);
CaptureDeviceManager.commit();
// Enables Portaudio Renderer
DeviceConfiguration.initPortAudioRenderer();
// now add it as available audio system to DeviceConfiguration
DeviceConfiguration.addAudioSystem(
DeviceConfiguration.AUDIO_SYSTEM_PORTAUDIO);
}
}

@ -0,0 +1,135 @@
/*
* SIP Communicator, the OpenSource Java VoIP and Instant Messaging client.
*
* Distributable under LGPL license.
* See terms of license at gnu.org.
*/
package net.java.sip.communicator.impl.neomedia.device;
import javax.media.*;
import net.java.sip.communicator.util.*;
/**
* A utility class that provides utility functions when working with processors.
*
* @author Emil Ivov
* @author Ken Larson
*/
public class ProcessorUtility implements ControllerListener
{
private final Logger logger = Logger.getLogger(ProcessorUtility.class);
/**
* The object that we use for syncing when waiting for a processor
* to enter a specific state.
*/
private final Object stateLock = new Object();
private boolean failed = false;
/**
* Default constructor, creates an instance of the Processor utility.
*/
public ProcessorUtility()
{
}
/**
* Returns the object that we use for syncing when waiting for a processor
* to enter a specific state.
* @return the <tt>Object</tt> used for syncing
*/
private Object getStateLock()
{
return stateLock;
}
/**
* Specifies whether the wait operation has failed or completed with
* success.
*
* @param failed <tt>true</tt> if waiting has failed; <tt>false</tt>,
* otherwise
*/
private void setFailed(boolean failed)
{
this.failed = failed;
}
/**
* This method is called when an event is generated by a
* <code>Controller</code> that this listener is registered with. We use
* the event to notify all waiting on our lock and record success or
* failure.
*
* @param ce The event generated.
*/
public void controllerUpdate(ControllerEvent ce)
{
// If there was an error during configure or
// realize, the processor will be closed
if (ce instanceof ControllerClosedEvent)
{
if (ce instanceof ControllerErrorEvent)
logger.warn("ControllerErrorEvent: " + ce);
else
logger.debug("ControllerClosedEvent: " + ce);
setFailed(true);
// For all controller events, send a notification
// to the thread waiting in the waitForState method.
}
Object stateLock = getStateLock();
synchronized (stateLock)
{
stateLock.notifyAll();
}
}
/**
* Waits until <tt>processor</tt> enters state and returns a boolean
* indicating success or failure of the operation.
*
* @param processor Processor
* @param state one of the Processor.XXXed state vars
* @return <tt>true</tt> if the state has been reached; <tt>false</tt>,
* otherwise
*/
public synchronized boolean waitForState(Processor processor, int state)
{
processor.addControllerListener(this);
setFailed(false);
// Call the required method on the processor
if (state == Processor.Configured)
processor.configure();
else if (state == Processor.Realized)
processor.realize();
// Wait until we get an event that confirms the
// success of the method, or a failure event.
// See the controllerUpdate method above.
while ((processor.getState() < state) && !failed)
{
Object stateLock = getStateLock();
synchronized (stateLock)
{
try
{
stateLock.wait();
}
catch (InterruptedException ie)
{
return false;
}
}
}
return !failed;
}
}
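
A sketch of the intended use of ProcessorUtility.waitForState(Processor, int) with a JMF Processor. The media locator passed in and the sketch class name are placeholders:

import javax.media.Manager;
import javax.media.MediaLocator;
import javax.media.Processor;
import net.java.sip.communicator.impl.neomedia.device.ProcessorUtility;

public class ProcessorUtilitySketch
{
    public static Processor prepareProcessor(MediaLocator locator)
        throws Exception
    {
        Processor processor = Manager.createProcessor(locator);
        ProcessorUtility utility = new ProcessorUtility();

        // Block until the processor reaches the Configured state or fails.
        if (!utility.waitForState(processor, Processor.Configured))
            throw new Exception("Failed to configure the processor.");

        // ... tweak track formats, output content descriptor, etc. ...

        // Then realize it in the same way.
        if (!utility.waitForState(processor, Processor.Realized))
            throw new Exception("Failed to realize the processor.");

        return processor;
    }
}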

@ -183,6 +183,11 @@ public String getEncoding()
return format.getEncoding();
}
public T getFormat()
{
return format;
}
/*
* Implements MediaFormat#getFormatParameters(). Returns a copy of the
* format properties of this instance. Modifications to the returned Map do

@ -0,0 +1,43 @@
Bundle-Activator: net.java.sip.communicator.impl.neomedia.NeomediaActivator
Bundle-Name: Neomedia Service Implementation
Bundle-Description: A bundle that offers Media capture and presentation capabilities.
Bundle-Vendor: sip-communicator.org
Bundle-Version: 0.0.1
System-Bundle: yes
Import-Package: org.osgi.framework,
org.xml.sax,
org.bouncycastle.crypto,
org.bouncycastle.crypto.digests,
org.bouncycastle.crypto.macs,
org.bouncycastle.crypto.params,
org.bouncycastle.crypto.engines,
javax.imageio,
javax.sound,
javax.sound.sampled,
javax.swing,
javax.swing.border,
javax.swing.event,
javax.swing.table,
net.java.sip.communicator.service.configuration,
net.java.sip.communicator.service.configuration.event,
net.java.sip.communicator.service.fileaccess,
net.java.sip.communicator.service.gui,
net.java.sip.communicator.service.netaddr,
net.java.sip.communicator.service.protocol,
net.java.sip.communicator.service.protocol.event,
net.java.sip.communicator.service.resources,
net.java.sip.communicator.util,
net.java.sip.communicator.util.swing,
quicktime,
quicktime.std.sg,
quicktime.qd,
quicktime.util,
quicktime.std.image,
gnu.java.zrtp,
gnu.java.zrtp.packets,
gnu.java.zrtp.utils,
gnu.java.zrtp.zidfile
Export-Package: net.java.sip.communicator.service.neomedia,
net.java.sip.communicator.service.neomedia.device,
net.java.sip.communicator.service.neomedia.event,
net.java.sip.communicator.service.neomedia.format

@ -77,9 +77,9 @@ public interface AudioMediaStream
* being fed from this stream's <tt>MediaDevice</tt> and transmit silence
* instead.
*
* @param on <tt>true</tt> if we are to start transmitting silence and
* @param mute <tt>true</tt> if we are to start transmitting silence and
* <tt>false</tt> if we are to use media from this stream's
* <tt>MediaDevice</tt> again.
*/
public void setMute(boolean on);
public void setMute(boolean mute);
}

@ -0,0 +1,203 @@
/*
* SIP Communicator, the OpenSource Java VoIP and Instant Messaging client.
*
* Distributable under LGPL license.
* See terms of license at gnu.org.
*/
package net.java.sip.communicator.service.neomedia;
import java.net.*;
import net.java.sip.communicator.impl.neomedia.*;
import net.java.sip.communicator.service.configuration.*;
import net.java.sip.communicator.util.*;
/**
* Represents a default implementation of <tt>StreamConnector</tt> which is
* initialized with a specific pair of control and data <tt>DatagramSocket</tt>s
* and which closes them (if they exist) when its {@link #close()} is invoked.
*
* @author Lubomir Marinov
*/
public class DefaultStreamConnector
implements StreamConnector
{
private static final Logger logger
= Logger.getLogger(DefaultStreamConnector.class);
/**
* The default number of binds that a Media Service Implementation should
* execute in case a port is already bound to (each retry would be on a
* new random port).
*/
public static final int BIND_RETRIES_DEFAULT_VALUE = 50;
/**
* The name of the property containing the number of binds that a Media
* Service Implementation should execute in case a port is already
* bound to (each retry would be on a new port in the allowed boundaries).
*/
public static final String BIND_RETRIES_PROPERTY_NAME
= "net.java.sip.communicator.service.media.BIND_RETRIES";
/**
* The name of the property that contains the maximum port number that we'd
* like our RTP managers to bind upon.
*/
public static final String MAX_PORT_NUMBER_PROPERTY_NAME
= "net.java.sip.communicator.service.media.MAX_PORT_NUMBER";
private static int maxPort = -1;
/**
* The name of the property that contains the minimum port number that we'd
* like our RTP managers to bind upon.
*/
public static final String MIN_PORT_NUMBER_PROPERTY_NAME
= "net.java.sip.communicator.service.media.MIN_PORT_NUMBER";
private static int minPort = -1;
private static synchronized DatagramSocket createDatagramSocket(
InetAddress bindAddr)
{
ConfigurationService config
= NeomediaActivator.getConfigurationService();
int bindRetries
= config
.getInt(BIND_RETRIES_PROPERTY_NAME, BIND_RETRIES_DEFAULT_VALUE);
if (maxPort < 0)
maxPort = config.getInt(MAX_PORT_NUMBER_PROPERTY_NAME, 6000);
for (int i = 0; i < bindRetries; i++)
{
if ((minPort < 0) || (minPort > maxPort))
minPort = config.getInt(MIN_PORT_NUMBER_PROPERTY_NAME, 5000);
int port = minPort++;
try
{
return new DatagramSocket(port, bindAddr);
}
catch (SocketException se)
{
logger
.warn(
"Retrying a bind because of a failure to bind to address "
+ bindAddr
+ " and port "
+ port,
se);
}
}
return null;
}
private final InetAddress bindAddr;
/**
* The <tt>DatagramSocket</tt> that a stream should use for control data
* (e.g. RTCP) traffic.
*/
protected DatagramSocket controlSocket;
/**
* The <tt>DatagramSocket</tt> that a stream should use for data (e.g. RTP)
* traffic.
*/
protected DatagramSocket dataSocket;
/**
* Initializes a new <tt>DefaultStreamConnector</tt> instance with no
* control and data <tt>DatagramSocket</tt>s.
* <p>
* Suitable for extenders willing to delay the creation of the control and
* data sockets. For example, they could override
* {@link #getControlSocket()} and/or {@link #getDataSocket()} and create
* them on demand.
*/
public DefaultStreamConnector()
{
this(null, null);
}
/**
* Initializes a new <tt>DefaultStreamConnector</tt> instance with a
* specific bind <tt>InetAddress</tt>. The new instance is to attempt to
* bind on demand to the specified <tt>InetAddress</tt> in the port range
* defined by the <tt>ConfigurationService</tt> properties
* {@link #MIN_PORT_NUMBER_PROPERTY_NAME} and
* {@link #MAX_PORT_NUMBER_PROPERTY_NAME} at most
* {@link #BIND_RETRIES_PROPERTY_NAME} times.
*
* @param bindAddr the local <tt>InetAddress</tt> the new instance is to
* attempt to bind to on demand
*/
public DefaultStreamConnector(InetAddress bindAddr)
{
this.bindAddr = bindAddr;
}
/**
* Initializes a new <tt>DefaultStreamConnector</tt> instance which is to
* represent a specific pair of control and data <tt>DatagramSocket</tt>s.
*
* @param controlSocket the <tt>DatagramSocket</tt> to be used for control
* data (e.g. RTCP) traffic
* @param dataSocket the <tt>DatagramSocket</tt> to be used for data (e.g.
* RTP) traffic
*/
public DefaultStreamConnector(
DatagramSocket controlSocket,
DatagramSocket dataSocket)
{
this.controlSocket = controlSocket;
this.dataSocket = dataSocket;
this.bindAddr = null;
}
/*
* Implements StreamConnector#close().
*/
public void close()
{
if (controlSocket != null)
controlSocket.close();
if (dataSocket != null)
dataSocket.close();
}
/*
* Implements StreamConnector#getControlSocket().
*/
public DatagramSocket getControlSocket()
{
if ((controlSocket == null) && (bindAddr != null))
controlSocket = createDatagramSocket(bindAddr);
return controlSocket;
}
/*
* Implements StreamConnector#getDataSocket().
*/
public DatagramSocket getDataSocket()
{
if ((dataSocket == null) && (bindAddr != null))
dataSocket = createDatagramSocket(bindAddr);
return dataSocket;
}
/*
* Implements StreamConnector#started(). Does nothing.
*/
public void started()
{
}
/*
* Implements StreamConnector#stopped(). Does nothing.
*/
public void stopped()
{
}
}
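
A sketch of using DefaultStreamConnector with a pre-bound RTP/RTCP socket pair and releasing it afterwards. The port numbers are arbitrary examples and the sketch class name is hypothetical; note that the constructor takes the control (RTCP) socket first:

import java.net.DatagramSocket;
import java.net.InetAddress;
import net.java.sip.communicator.service.neomedia.DefaultStreamConnector;
import net.java.sip.communicator.service.neomedia.StreamConnector;

public class StreamConnectorSketch
{
    public static StreamConnector createConnector() throws Exception
    {
        InetAddress localhost = InetAddress.getLocalHost();

        // Classic RTP/RTCP convention: data on an even port, control on the
        // next odd one.
        DatagramSocket rtpSocket = new DatagramSocket(5004, localhost);
        DatagramSocket rtcpSocket = new DatagramSocket(5005, localhost);

        // The constructor takes the control socket first, then the data one.
        return new DefaultStreamConnector(rtcpSocket, rtpSocket);
    }

    public static void release(StreamConnector connector)
    {
        // Closes both sockets if they exist.
        connector.close();
    }
}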

@ -22,34 +22,6 @@
*/
public interface MediaStream
{
/**
* The name of the property containing the number of binds that a Media
* Service Implementation should execute in case a port is already
* bound to (each retry would be on a new port in the allowed boundaries).
*/
public static final String BIND_RETRIES_PROPERTY_NAME
= "net.java.sip.communicator.service.media.BIND_RETRIES";
/**
* The name of the property that contains the minimum port number that we'd
* like our RTP managers to bind upon.
*/
public static final String MIN_PORT_NUMBER_PROPERTY_NAME
= "net.java.sip.communicator.service.media.MIN_PORT_NUMBER";
/**
* The name of the property that contains the maximum port number that we'd
* like our RTP managers to bind upon.
*/
public static final String MAX_PORT_NUMBER_PROPERTY_NAME
= "net.java.sip.communicator.service.media.MAX_PORT_NUMBER";
/**
* The default number of binds that a Media Service Implementation should
* execute in case a port is already bound to (each retry would be on a
* new random port).
*/
public static final int BIND_RETRIES_DEFAULT_VALUE = 50;
/**
* The name of the property which indicates whether the remote SSRC is
@ -82,6 +54,12 @@ public interface MediaStream
*/
public void stop();
/**
* Releases the resources allocated by this instance in the course of its
* execution and prepares it to be garbage collected.
*/
public void close();
/**
* Sets the MediaFormat that this <tt>MediaStream</tt> should transmit in.
*
@ -169,4 +147,6 @@ public interface MediaStream
* @param listener the listener that we'd like to remove.
*/
public void removePropertyChangeListener(PropertyChangeListener listener);
public void setTarget(MediaStreamTarget target);
}

@ -38,4 +38,23 @@ public interface StreamConnector
* use for control data (e.g. RTCP).
*/
public DatagramSocket getControlSocket();
/**
* Releases the resources allocated by this instance in the course of its
* execution and prepares it to be garbage collected.
*/
public void close();
/**
* Notifies this instance that utilization of its <tt>DatagramSocket</tt>s
* for data and/or control traffic has started.
*/
public void started();
/**
* Notifies this instance that utilization of its <tt>DatagramSocket</tt>s
* for data and/or control traffic has temporarily stopped. This instance
* should be prepared to be started at a later time again though.
*/
public void stopped();
}

@ -8,7 +8,7 @@
import java.awt.*;
import net.java.sip.communicator.service.protocol.event.*;
import net.java.sip.communicator.service.neomedia.event.*;
/**
* Extends the <tt>MediaStream</tt> interface and adds methods specific to
@ -37,7 +37,7 @@ public interface VideoMediaStream
*
* @param listener the <tt>VideoListener</tt> to be notified when
* visual/video <tt>Component</tt>s are being added or removed in this
* <tt>CallSession</tt>
* <tt>VideoMediaStream</tt>
*/
public void addVideoListener(VideoListener listener);
}
