Fix some javadoc warnings.

cusax-fix
Sebastien Vincent 16 years ago
parent 5ada14242a
commit dde39782e1

@ -28,8 +28,8 @@ public static class CaptureDevice
{
/**
* Compares two CaptureDeviceInfo
* @param a
* @param b
* @param a first <tt>CaptureDeviceInfo</tt> to compare
* @param b second <tt>CaptureDeviceInfo</tt> to compare
* @return whether a is equal to b
*/
public static boolean equals(CaptureDeviceInfo a, CaptureDeviceInfo b)
@ -206,7 +206,7 @@ private CaptureDevice[] getDevices()
/**
* Extracts the devices selected by the configuration.
* @return
* @return <tt>CaptureDevice</tt> selected
*/
private CaptureDevice getSelectedDevice()
{

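The equals helper documented above compares two javax.media.CaptureDeviceInfo instances. A minimal sketch of what such a null-safe comparison can look like; comparing by name and locator external form is an assumption for illustration, the actual criteria are not visible in this hunk:

import javax.media.CaptureDeviceInfo;
import javax.media.MediaLocator;

public final class CaptureDeviceInfoCompare
{
    /**
     * Null-safe comparison of two CaptureDeviceInfo instances by name and
     * locator external form. Illustrative helper, not the class's own code.
     */
    public static boolean equals(CaptureDeviceInfo a, CaptureDeviceInfo b)
    {
        if (a == b)
            return true;
        if ((a == null) || (b == null))
            return false;

        MediaLocator la = a.getLocator();
        MediaLocator lb = b.getLocator();
        String extA = (la == null) ? null : la.toExternalForm();
        String extB = (lb == null) ? null : lb.toExternalForm();

        return a.getName().equals(b.getName())
            && ((extA == null) ? (extB == null) : extA.equals(extB));
    }
}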
@ -9,13 +9,16 @@
import javax.media.format.*;
/**
* Class to centralize workarounds or changes that need to be made for FMJ to work.
* This is also a place to tweak which workarounds are used.
* Class to centralize workarounds or changes that need to be made for FMJ to
* work. This is also a place to tweak which workarounds are used.
*
* @author Ken Larson
*/
public class FMJConditionals
{
/**
* If FMJ is used.
*/
public static final boolean IS_FMJ = false;
/**

@ -20,11 +20,11 @@
* devices and uses this data to seed the Fortuna PRNG. The ZrtpFortuna PRNG
* is a singleton and all other methods that require random data shall use
* this singleton.
*
* Use GatherEntropy during startup and initialization phase of SIP
*
* Use GatherEntropy during startup and initialization phase of SIP
* Communicator but after initialization of the media devices to get entropy
* data at the earliest point. Also make sure that entropy data is read from
* local sources only and that entropy data is never send out (via networks
* local sources only and that entropy data is never sent out (via networks
* for example).
*
* @author Werner Dittmann <Werner.Dittmann@t-online.de>
@ -43,13 +43,13 @@ public class GatherEntropy
* Device config to look for capture devices.
*/
private final DeviceConfiguration deviceConfiguration;
/**
* Other methods shall/may check this to see if Fortuna was seeded with
* entropy.
*/
private static boolean entropyOk = false;
/**
* Number of gathered entropy bytes.
*/
@ -61,33 +61,33 @@ public class GatherEntropy
* data.
*/
private int bytesToGather = 0;
/**
* Bytes per 20ms time slice.
*/
private int bytes20ms = 0;
/**
* How many seconds of audio to read.
*
*
*/
private static final int NUM_OF_SECONDS = 2;
public GatherEntropy(DeviceConfiguration deviceConfiguration)
public GatherEntropy(DeviceConfiguration deviceConfiguration)
{
this.deviceConfiguration = deviceConfiguration;
}
/**
* Get status of entropy flag.
*
* @return Status if entropy was gathered and set in Fortuna PRNG.
*
* @return whether entropy was gathered and set in the Fortuna PRNG.
*/
public static boolean isEntropyOk()
{
return entropyOk;
}
/**
* @return the number of gathered entropy bytes.
*/
@ -97,11 +97,11 @@ protected int getGatheredEntropy()
}
/**
* Set entropy to ZrtpFortuna singleton.
*
* The methods reads entropy data and seeds the ZrtpFortuna singleton.
*
* The method reads entropy data and seeds the ZrtpFortuna singleton.
* The method seeds the first pool (0) of Fortuna to make sure that
* this entropy is always used.
*
*
* @return true if entropy data was available, false otherwise.
*/
public boolean setEntropy()
@ -118,7 +118,7 @@ private class GatherAudio extends Thread implements BufferTransferHandler
{
/**
* The PortAudio <tt>DataSource</tt> which provides
* {@link #portAudioStream}.
* {@link #audioStream}.
*/
private DataSource dataSource = null;
@ -134,18 +134,18 @@ private class GatherAudio extends Thread implements BufferTransferHandler
private final Buffer firstBuf = new Buffer();
private boolean bufferAvailable = false;
private final Object bufferSync = new Object();
/**
* Prepares to read entropy data from portaudio capture device.
*
* The method gets an PortAudio instance with a set of capture
*
* The method gets a PortAudio instance with a set of capture
* parameters.
*
* @return True if the PortAudio input stream is available.
*
* @return True if the PortAudio input stream is available.
*/
private boolean prepareAudioEntropy()
{
CaptureDeviceInfo audioCaptureDevice =
CaptureDeviceInfo audioCaptureDevice =
deviceConfiguration.getAudioCaptureDevice();
if (audioCaptureDevice == null)
return false;
@ -207,7 +207,7 @@ public void transferData(PushBufferStream stream)
}
/**
* Gather entropy from portaudio capture device and seed Fortuna PRNG.
*
*
* The method gathers a number of samples and seeds the Fortuna PRNG.
*/
@Override
@ -223,7 +223,7 @@ public void run()
dataSource.start();
int i = 0;
while (gatheredEntropy < bytesToGather)
while (gatheredEntropy < bytesToGather)
{
if (audioStream instanceof PushBufferStream)
{

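The GatherEntropy hunks above describe reading roughly NUM_OF_SECONDS of microphone audio and seeding pool 0 of the ZrtpFortuna singleton with it. A minimal sketch of the same idea, using java.security.SecureRandom as a stand-in for ZrtpFortuna (whose seeding API is not shown in this diff); the bytes would come from the captured audio Buffer:

import java.security.SecureRandom;

public class EntropySeeder
{
    private final SecureRandom prng = new SecureRandom();

    /** Number of entropy bytes fed into the PRNG so far. */
    private int gatheredEntropy;

    /** Feeds one block of captured audio bytes into the PRNG as extra seed. */
    public void seedWithAudio(byte[] audioData, int offset, int length)
    {
        byte[] block = new byte[length];
        System.arraycopy(audioData, offset, block, 0, length);
        prng.setSeed(block); // mixes into, does not replace, the PRNG state
        gatheredEntropy += length;
    }

    public int getGatheredEntropy()
    {
        return gatheredEntropy;
    }
}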
@ -53,6 +53,9 @@ public class RTPConnectorOutputStream
protected final List<InetSocketAddress> targets
= new LinkedList<InetSocketAddress>();
/**
* List of available raw packets.
*/
private final LinkedBlockingQueue<RawPacket> availRawPackets
= new LinkedBlockingQueue<RawPacket>();
@ -90,7 +93,7 @@ public void close()
maxPacketsPerMillisPolicy = null;
removeTargets();
}
/**
* Creates a new <tt>RawPacket</tt> from a specific <tt>byte[]</tt> buffer
* in order to have this instance send its packet data through its
@ -109,10 +112,10 @@ public void close()
protected RawPacket createRawPacket(byte[] buffer, int offset, int length)
{
RawPacket pkt = availRawPackets.poll();
if (pkt == null || pkt.getBuffer().length < length)
if (pkt == null || pkt.getBuffer().length < length)
{
byte[] buf = new byte[length];
pkt = new RawPacket();
pkt = new RawPacket();
pkt.setBuffer(buf);
}
System.arraycopy(buffer, offset, pkt.getBuffer(), 0, length);
@ -223,10 +226,14 @@ public void setMaxPacketsPerMillis(int maxPackets, long perMillis)
/**
* Implements {@link OutputDataStream#write(byte[], int, int)}.
*
* @param buffer
* @param offset
* @param length
* @return
* @param buffer the <tt>byte[]</tt> that we'd like to copy the content
* of the packet to.
* @param offset the position where we are supposed to start writing in
* <tt>buffer</tt>.
* @param length the number of <tt>byte</tt>s available for writing in
* <tt>buffer</tt>.
*
* @return the number of bytes written
*/
public int write(byte[] buffer, int offset, int length)
{
@ -287,7 +294,8 @@ private class MaxPacketsPerMillisPolicy
* <tt>DatagramSocket</tt> of this <tt>OutputDataSource</tt>.
*/
private final ArrayBlockingQueue<RawPacket> packetQueue
= new ArrayBlockingQueue<RawPacket>(MAX_PACKETS_PER_MILLIS_POLICY_PACKET_QUEUE_CAPACITY);
= new ArrayBlockingQueue<RawPacket>(
MAX_PACKETS_PER_MILLIS_POLICY_PACKET_QUEUE_CAPACITY);
/**
* The number of RTP packets already sent during the current
@ -308,7 +316,7 @@ private class MaxPacketsPerMillisPolicy
* <tt>OutputDataSource</tt>.
*/
private Thread sendThread;
/**
* To signal run or stop condition to send thread.
*/
@ -347,7 +355,10 @@ public void run()
}
}
}
/**
* Closes the connector.
*/
synchronized void close()
{
if (!sendRun)
@ -384,7 +395,7 @@ private void runInSendThread()
}
if (!sendRun)
break;
long time = System.nanoTime();
long millisRemainingTime = time - millisStartTime;
@ -397,7 +408,7 @@ private void runInSendThread()
else if ((maxPackets > 0)
&& (packetsSentInMillis >= maxPackets))
{
while (true)
while (true)
{
millisRemainingTime = System.nanoTime()
- millisStartTime;
@ -463,7 +474,7 @@ public void setMaxPacketsPerMillis(int maxPackets, long perMillis)
*/
public void write(RawPacket packet)
{
while (true)
while (true)
{
try
{

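createRawPacket above recycles RawPacket instances through availRawPackets so a fresh buffer is allocated only when the pooled one is too small, and MaxPacketsPerMillisPolicy then paces how fast packets leave the socket. A minimal sketch of the recycling half of that pattern, with a hypothetical Packet holder standing in for RawPacket:

import java.util.concurrent.LinkedBlockingQueue;

public class PacketPool
{
    /** Hypothetical stand-in for RawPacket: just a byte[] with a length. */
    public static final class Packet
    {
        byte[] buffer;
        int length;
    }

    private final LinkedBlockingQueue<Packet> free
        = new LinkedBlockingQueue<Packet>();

    /** Takes a pooled packet if one is large enough, otherwise allocates. */
    public Packet createPacket(byte[] data, int offset, int length)
    {
        Packet pkt = free.poll();

        if ((pkt == null) || (pkt.buffer.length < length))
        {
            pkt = new Packet();
            pkt.buffer = new byte[length];
        }
        System.arraycopy(data, offset, pkt.buffer, 0, length);
        pkt.length = length;
        return pkt;
    }

    /** Returns a packet to the pool once it has been sent. */
    public void returnPacket(Packet pkt)
    {
        free.offer(pkt);
    }
}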
@ -71,6 +71,7 @@ protected void finalize()
/**
* Frees the memory pointed to by the <tt>data0</tt> member of the native
* <tt>AVFrame</tt>.
* @param data0 pointer to free
*/
protected void freeData0(long data0)
{

@ -25,12 +25,20 @@
public class Packetizer
extends AbstractPacketizer
{
/**
* Array of default output formats.
*/
private static final Format[] DEFAULT_OUTPUT_FORMATS
= { new VideoFormat(Constants.H264_RTP) };
// without the headers
/**
* Maximum payload size without the headers.
*/
public static final int MAX_PAYLOAD_SIZE = 1024;
/**
* Name of the plugin.
*/
private static final String PLUGIN_NAME = "H264 Packetizer";
/**
@ -62,6 +70,9 @@ public Packetizer()
outputFormat = null;
}
/**
* Close this <tt>Packetizer</tt>.
*/
@Override
public synchronized void close()
{
@ -100,6 +111,12 @@ private static int ff_avc_find_startcode(
return endIndex;
}
/**
* Get the matching output formats for a specific format.
*
* @param in input format
* @return array of formats matching the input format
*/
private Format[] getMatchingOutputFormats(Format in)
{
VideoFormat videoIn = (VideoFormat) in;
@ -116,7 +133,11 @@ private Format[] getMatchingOutputFormats(Format in)
videoIn.getFrameRate())
};
}
/**
* Get codec name.
*
* @return codec name
*/
@Override
public String getName()
{
@ -125,6 +146,9 @@ public String getName()
/**
* Return the list of formats supported at the output.
* @param in input <tt>Format</tt> to determine corresponding output
* <tt>Format</tt>s
* @return array of formats supported at output
*/
public Format[] getSupportedOutputFormats(Format in)
{
@ -140,6 +164,9 @@ public Format[] getSupportedOutputFormats(Format in)
return getMatchingOutputFormats(in);
}
/**
* Open this <tt>Packetizer</tt>.
*/
@Override
public synchronized void open()
throws ResourceUnavailableException
@ -245,6 +272,14 @@ private boolean packetizeNAL(byte[] nal, int nalOffset, int nalLength)
return nalsAdded;
}
/**
* Processes (packetize) a buffer.
*
* @param inBuffer input buffer
* @param outBuffer output buffer
* @return <tt>BUFFER_PROCESSED_OK</tt> if buffer has been successfully
* processed
*/
@Override
public int process(Buffer inBuffer, Buffer outBuffer)
{
@ -364,6 +399,12 @@ public int process(Buffer inBuffer, Buffer outBuffer)
nalsAdded ? process(inBuffer, outBuffer) : OUTPUT_BUFFER_NOT_FILLED;
}
/**
* Sets the input format.
*
* @param in format to set
* @return format
*/
@Override
public Format setInputFormat(Format in)
{
@ -379,6 +420,16 @@ public Format setInputFormat(Format in)
return in;
}
/**
* Sets the <tt>Format</tt> in which this <tt>Codec</tt> is to output media
* data.
*
* @param out the <tt>Format</tt> in which this <tt>Codec</tt> is to
* output media data
* @return the <tt>Format</tt> in which this <tt>Codec</tt> is currently
* configured to output media data or <tt>null</tt> if <tt>format</tt> was
* found to be incompatible with this <tt>Codec</tt>
*/
@Override
public Format setOutputFormat(Format out)
{

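The H.264 Packetizer splits an encoded access unit at Annex B start codes (see ff_avc_find_startcode above) and emits payloads of at most MAX_PAYLOAD_SIZE bytes. A minimal sketch of locating the next three-byte 0x000001 start code in a byte array:

public final class NalScanner
{
    /**
     * Returns the index of the next three-byte 0x000001 start code at or
     * after beginIndex, or endIndex if none is found. A four-byte
     * 0x00000001 start code is reported at its last three bytes.
     */
    public static int findStartCode(byte[] buf, int beginIndex, int endIndex)
    {
        for (int i = beginIndex; i + 3 <= endIndex; i++)
            if ((buf[i] == 0) && (buf[i + 1] == 0) && (buf[i + 2] == 1))
                return i;
        return endIndex;
    }
}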
@ -41,7 +41,7 @@
* multiple output audio streams at one and the same time, though, each of them
* containing the mix of a subset of the input audio streams.
* </p>
*
*
* @author Lubomir Marinov
*/
public class AudioMixer
@ -131,7 +131,7 @@ public class AudioMixer
* <tt>AudioMixingPushBufferDataSource</tt>s it creates. The specified
* <tt>CaptureDevice</tt> is also added as the first input
* <tt>DataSource</tt> of the new instance.
*
*
* @param captureDevice the <tt>CaptureDevice</tt> capabilities to be
* provided to the <tt>AudioMixingPushBufferDataSource</tt>s created by the
* new instance and its first input <tt>DataSource</tt>
@ -167,7 +167,7 @@ public AudioMixer(CaptureDevice captureDevice)
* <tt>DataSource</tt>s from which this instance reads audio. If the
* specified <tt>DataSource</tt> indeed provides audio, the respective
* contributions to the mix are always included.
*
*
* @param inputDataSource a new <tt>DataSource</tt> to input audio to this
* instance
*/
@ -182,7 +182,7 @@ public void addInputDataSource(DataSource inputDataSource)
* specified <tt>DataSource</tt> indeed provides audio, the respective
* contributions to the mix will be excluded from the mix output provided
* through a specific <tt>AudioMixingPushBufferDataSource</tt>.
*
*
* @param inputDataSource a new <tt>DataSource</tt> to input audio to this
* instance
* @param outputDataSource the <tt>AudioMixingPushBufferDataSource</tt> to
@ -258,8 +258,8 @@ void addInputDataSource(
* <tt>AudioMixingPushBufferDataSource</tt>s reading from this
* <tt>AudioMixer</tt> which gets connected causes it to connect to the
* input <tt>DataSource</tt>s it manages.
*
* @throws IOException
*
* @throws IOException if an input/output error occurs
*/
void connect()
throws IOException
@ -377,7 +377,7 @@ private InputStreamDesc createInputStreamDesc(
* new input <tt>DataSources</tt> in this <tt>AudioMixer</tt> but
* have their contributions not included in the mix available through the
* returned <tt>AudioMixingPushBufferDataSource</tt>.
*
*
* @return a new <tt>AudioMixingPushBufferDataSource</tt> which gives access
* to a single audio stream representing the mix of the audio streams input
* into this <tt>AudioMixer</tt> through its input <tt>DataSource</tt>s
@ -391,7 +391,7 @@ public AudioMixingPushBufferDataSource createOutputDataSource()
* Creates a <tt>DataSource</tt> which attempts to transcode the tracks of a
* specific input <tt>DataSource</tt> into a specific output
* <tt>Format</tt>.
*
*
* @param inputDataSourceDesc the <tt>InputDataSourceDesc</tt> describing
* the input <tt>DataSource</tt> to be transcoded into the specified output
* <tt>Format</tt> and to receive the transcoding <tt>DataSource</tt>
@ -477,7 +477,7 @@ BufferControl getBufferControl()
* Gets the <tt>CaptureDeviceInfo</tt> of the <tt>CaptureDevice</tt>
* this <tt>AudioMixer</tt> provides through its output
* <tt>AudioMixingPushBufferDataSource</tt>s.
*
*
* @return the <tt>CaptureDeviceInfo</tt> of the <tt>CaptureDevice</tt> this
* <tt>AudioMixer</tt> provides through its output
* <tt>AudioMixingPushBufferDataSource</tt>s
@ -489,7 +489,7 @@ CaptureDeviceInfo getCaptureDeviceInfo()
/**
* Gets the content type of the data output by this <tt>AudioMixer</tt>.
*
*
* @return the content type of the data output by this <tt>AudioMixer</tt>
*/
String getContentType()
@ -544,7 +544,7 @@ private InputStreamDesc getExistingInputStreamDesc(
/**
* Gets the duration of each one of the output streams produced by this
* <tt>AudioMixer</tt>.
*
*
* @return the duration of each one of the output streams produced by this
* <tt>AudioMixer</tt>
*/
@ -556,7 +556,7 @@ Time getDuration()
/**
* Gets the <tt>Format</tt> in which a specific <tt>DataSource</tt>
* provides stream data.
*
*
* @param dataSource the <tt>DataSource</tt> for which the <tt>Format</tt>
* in which it provides stream data is to be determined
* @return the <tt>Format</tt> in which the specified <tt>dataSource</tt>
@ -574,7 +574,7 @@ private static Format getFormat(DataSource dataSource)
/**
* Gets the <tt>Format</tt> in which a specific
* <tt>SourceStream</tt> provides data.
*
*
* @param stream
* the <tt>SourceStream</tt> for which the
* <tt>Format</tt> in which it provides data is to be
@ -596,7 +596,7 @@ private static Format getFormat(SourceStream stream)
* Gets an array of <tt>FormatControl</tt>s for the
* <tt>CaptureDevice</tt> this <tt>AudioMixer</tt> provides through
* its output <tt>AudioMixingPushBufferDataSource</tt>s.
*
*
* @return an array of <tt>FormatControl</tt>s for the
* <tt>CaptureDevice</tt> this <tt>AudioMixer</tt> provides
* through its output <tt>AudioMixingPushBufferDataSource</tt>s
@ -621,7 +621,7 @@ FormatControl[] getFormatControls()
* <tt>InputStreamDesc</tt>) of a specific <tt>DataSource</tt>
* (provided in the form of <tt>InputDataSourceDesc</tt>) which produce
* data in a specific <tt>AudioFormat</tt> (or a matching one).
*
*
* @param inputDataSourceDesc
* the <tt>DataSource</tt> (in the form of
* <tt>InputDataSourceDesc</tt>) which is to be examined for
@ -739,7 +739,7 @@ && matches(inputFormat, outputFormat))
* <tt>DataSource</tt> does not have such <tt>SourceStream</tt>s, an attempt
* is made to transcode its tracks so that such <tt>SourceStream</tt>s can
* be retrieved from it after transcoding.
*
*
* @param outputFormat the <tt>AudioFormat</tt> in which the retrieved
* <tt>SourceStream</tt>s are to produce data
* @param existingInputStreams the <tt>SourceStream</tt>s which are already
@ -804,7 +804,7 @@ public AudioMixingPushBufferDataSource getLocalOutputDataSource()
* <tt>DataSource</tt>s of this <tt>AudioMixer</tt> can produce data
* and which is to be the output <tt>Format</tt> of this
* <tt>AudioMixer</tt>.
*
*
* @return the <tt>AudioFormat</tt> in which the input
* <tt>DataSource</tt>s of this <tt>AudioMixer</tt> can
* produce data and which is to be the output <tt>Format</tt> of
@ -874,7 +874,7 @@ private AudioFormat getOutputFormatFromInputDataSources()
* does not exist already, which reads data from the input
* <tt>DataSource</tt>s of this <tt>AudioMixer</tt> and pushes it to
* output <tt>AudioMixingPushBufferStream</tt>s for audio mixing.
*
*
* @return the <tt>AudioMixerPushBufferStream</tt> which reads data from
* the input <tt>DataSource</tt>s of this
* <tt>AudioMixer</tt> and pushes it to output
@ -923,7 +923,7 @@ AudioMixerPushBufferStream getOutputStream()
* <tt>Format</tt>, the only requirement for the specified
* <tt>Format</tt>s to match is for both of them to have one and the
* same encoding.
*
*
* @param input
* the <tt>Format</tt> for which it is required to determine
* whether it matches a specific <tt>Format</tt>
@ -1000,7 +1000,7 @@ public void removeInputDataSources(DataSourceFilter dataSourceFilter)
* format of the input <tt>DataSource</tt>s of this
* <tt>AudioMixer</tt> in an attempt to not have to perform explicit
* transcoding of the input <tt>SourceStream</tt>s.
*
*
* @param outputFormat
* the <tt>AudioFormat</tt> in which the input
* <tt>DataSource</tt>s of this <tt>AudioMixer</tt> are

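The AudioMixer javadoc above describes output streams carrying the mix of the input audio streams. A minimal sketch of the core operation on 16-bit signed PCM samples, assuming all inputs already share one format; clipped summation is used for illustration and is not necessarily how the class itself combines samples:

public final class PcmMix
{
    /** Mixes 16-bit PCM sample arrays of equal length by clipped summation. */
    public static short[] mix(short[][] inputs)
    {
        int length = inputs[0].length;
        short[] out = new short[length];

        for (int i = 0; i < length; i++)
        {
            int sum = 0;
            for (short[] input : inputs)
                sum += input[i];
            // clip to the 16-bit signed range instead of wrapping around
            if (sum > Short.MAX_VALUE)
                sum = Short.MAX_VALUE;
            else if (sum < Short.MIN_VALUE)
                sum = Short.MIN_VALUE;
            out[i] = (short) sum;
        }
        return out;
    }
}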
@ -56,7 +56,7 @@ protected AudioMediaDeviceSession(AbstractMediaDevice device)
}
/**
* Called by {@link MediaDeviceSession#createPlayer(DataSource dataSource)}
* Called by {@link MediaDeviceSession#createPlayer(DataSource dataSource)}
* when the player associated with this session's
* <tt>ReceiveStream</tt> enters the <tt>Configured</tt> state, so
* we use the occasion to add our audio level effect.

@ -440,8 +440,8 @@ public CaptureDeviceInfo[] getAvailableAudioPlaybackDevices()
/**
* Gets the list of video capture devices which are available through this
* <code>DeviceConfiguration</code>, amongst which is
* {@link #getVideoCaptureDevice()} and represent acceptable values
* for {@link #setVideoCaptureDevice(CaptureDeviceInfo, boolean)}
* {@link #getVideoCaptureDevice(MediaUseCase)} and represent acceptable
* values for {@link #setVideoCaptureDevice(CaptureDeviceInfo, boolean)}
*
* @param useCase extract video capture devices that correspond to this
* <tt>MediaUseCase</tt>
@ -1007,7 +1007,6 @@ public boolean isDenoiseEnabled()
*/
private void registerCustomRenderers()
{
@SuppressWarnings("unchecked")
Vector<String> renderers
= PlugInManager.getPlugInList(null, null, PlugInManager.RENDERER);
boolean commit = false;
@ -1044,7 +1043,6 @@ private void registerCustomRenderers()
* are considered preferred.
*/
int pluginType = PlugInManager.RENDERER;
@SuppressWarnings("unchecked")
Vector<String> plugins
= PlugInManager.getPlugInList(null, null, pluginType);

@ -17,11 +17,10 @@
import net.java.sip.communicator.impl.neomedia.codec.video.*;
import net.java.sip.communicator.impl.neomedia.*;
import net.java.sip.communicator.impl.neomedia.imgstreaming.*;
import net.java.sip.communicator.impl.neomedia.jmfext.media.protocol.imgstreaming.*;
/**
* Add ImageStreaming capture device.
*
*
* @author Sebastien Vincent
*/
public class ImageStreamingAuto
@ -94,7 +93,7 @@ public ImageStreamingAuto() throws Exception
new MediaLocator(
ImageStreamingUtils.LOCATOR_PROTOCOL + ":" + i),
formats);
/* add to JMF device manager */
CaptureDeviceManager.addDevice(devInfo);
i++;

@ -195,13 +195,13 @@ public class PortAudioAuto
* Gets a sample rate supported by a PortAudio device with a specific device
* index with which it is to be registered with JMF.
*
* @param <tt>true</tt> if the supported sample rate is to be retrieved for
* @param input <tt>true</tt> if the supported sample rate is to be retrieved for
* the PortAudio device with the specified device index as an input device
* or <tt>false</tt> for an output device
* @param deviceIndex the device index of the PortAudio device for which a
* supported sample rate is to be retrieved
* @param channelCount
* @param sampleFormat
* @param channelCount number of channels
* @param sampleFormat sample format
* @return a sample rate supported by the PortAudio device with the
* specified device index with which it is to be registered with JMF
*/

@ -1276,11 +1276,11 @@ public PlayerScaler(Player player)
* Determines when the input video sizes changes and reports it as a
* <tt>SizeChangeVideoEvent</tt> because <tt>Player</tt> is unable to
* do it when this <tt>SwScaler</tt> is scaling to a specific
* <tt>outputSize</tt>.
* <tt>outputSize</tt>.
*
* @param input
* @param output
* @return
* @param input input buffer
* @param output output buffer
* @return <tt>BUFFER_PROCESSED_OK</tt> if the buffer has been successfully processed
* @see SwScaler#process(Buffer, Buffer)
*/
@Override
@ -1313,8 +1313,8 @@ public int process(Buffer input, Buffer output)
* Ensures that this <tt>SwScaler</tt> preserves the aspect ratio of its
* input video when scaling.
*
* @param inputFormat
* @return
* @param inputFormat format to set
* @return format
* @see SwScaler#setInputFormat(Format)
*/
@Override

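PlayerScaler above scales to a fixed outputSize while preserving the input aspect ratio. A minimal sketch of that fitting computation (an assumption about the approach; the diff only touches the javadoc):

import java.awt.Dimension;

public final class AspectRatio
{
    /** Largest size that fits inside bounds and keeps the input's ratio. */
    public static Dimension fit(Dimension input, Dimension bounds)
    {
        double scale
            = Math.min(
                    bounds.getWidth() / input.getWidth(),
                    bounds.getHeight() / input.getHeight());

        return
            new Dimension(
                    (int) Math.round(input.getWidth() * scale),
                    (int) Math.round(input.getHeight() * scale));
    }
}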
@ -175,7 +175,8 @@ public DSCaptureDevice[] getCaptureDevices()
/**
* Native method to get capture devices pointers.
*
* @return array of native pointer
* @param ptr native pointer of DSManager
* @return array of native pointers to DSCaptureDevice
*/
private native long[] getCaptureDevices(long ptr);
}

@ -32,7 +32,7 @@ public class ImageStreamingUtils
* @param src source image
* @param width width of scaled image
* @param height height of scaled image
* @param type
* @param type <tt>BufferedImage</tt> type
* @return scaled <tt>BufferedImage</tt>
*/
public static BufferedImage getScaledImage(BufferedImage src,
@ -63,7 +63,7 @@ public static BufferedImage getScaledImage(BufferedImage src,
* @param output output buffer, if not null and if its length is at least
* image's (width * height) * 4, method will put bytes in it.
* @return raw bytes or null if src is not an ARGB
* <tt>BufferedImage</tt>
* <tt>BufferedImage</tt>
*/
public static byte[] getImageBytes(BufferedImage src, byte output[])
{

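ImageStreamingUtils.getScaledImage above produces a scaled BufferedImage of a given type. One plausible way to implement it with Graphics2D and bilinear interpolation (the utility's actual rendering hints are not visible here):

import java.awt.Graphics2D;
import java.awt.RenderingHints;
import java.awt.image.BufferedImage;

public final class ImageScale
{
    /** Scales src to width x height into a new BufferedImage of the given type. */
    public static BufferedImage getScaledImage(
            BufferedImage src, int width, int height, int type)
    {
        BufferedImage dst = new BufferedImage(width, height, type);
        Graphics2D g = dst.createGraphics();

        try
        {
            g.setRenderingHint(
                    RenderingHints.KEY_INTERPOLATION,
                    RenderingHints.VALUE_INTERPOLATION_BILINEAR);
            g.drawImage(src, 0, 0, width, height, null);
        }
        finally
        {
            g.dispose();
        }
        return dst;
    }
}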
@ -28,8 +28,8 @@ public class DirectShowStream extends AbstractPushBufferStream
/**
* The pool of <tt>ByteBuffer</tt>s this instances is using to transfer the
* media data captured by {@link #captureOutput} out of this instance
* through the <tt>Buffer</tt>s specified in its {@link #process(Buffer)}.
* media data captured by {@link #grabber} out of this instance
* through the <tt>Buffer</tt>s specified in its {@link #read(Buffer)}.
*/
private final ByteBufferPool bufferPool = new ByteBufferPool();
@ -72,13 +72,13 @@ public class DirectShowStream extends AbstractPushBufferStream
* The <tt>Thread</tt> which is to call
* {@link BufferTransferHandler#transferData(PushBufferStream)} for this
* <tt>DirectShowStream</tt> so that the call is not made in DirectShow
* and we can drop late frames when {@link #automaticallyDropsLateFrames} is
* <tt>false</tt>.
* and we can drop late frames when
* {@link #automaticallyDropsLateVideoFrames} is <tt>false</tt>.
*/
private Thread transferDataThread;
/**
* The indicator which determines whether {@link #captureOutput}
* The indicator which determines whether {@link #grabber}
* automatically drops late frames. If <tt>false</tt>, we have to drop them
* ourselves because DirectShow will buffer them all and the video will
* be late.
@ -332,7 +332,7 @@ public void stop() throws IOException
bufferPool.returnFreeBuffer(data);
data = null;
}
if(nextData != null)
{
bufferPool.returnFreeBuffer(nextData);

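DirectShowStream (and QuickTimeStream below) hand captured frames to a separate transferDataThread and drop late frames themselves when the capture back end does not. A minimal sketch of the underlying idea, a single-slot handoff in which a new frame silently replaces one that was never consumed (field and method names here are illustrative only):

public class LatestFrameSlot<T>
{
    private T frame;

    /** Producer: publish a frame, replacing any frame not yet consumed. */
    public synchronized void offer(T newFrame)
    {
        frame = newFrame; // an unconsumed late frame is simply dropped
        notifyAll();
    }

    /** Consumer: wait for the next frame and take it. */
    public synchronized T take() throws InterruptedException
    {
        while (frame == null)
            wait();

        T taken = frame;
        frame = null;
        return taken;
    }
}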
@ -6,13 +6,9 @@
*/
package net.java.sip.communicator.impl.neomedia.jmfext.media.protocol.imgstreaming;
import java.awt.*;
import javax.media.*;
import javax.media.control.*;
import javax.media.format.*;
import net.java.sip.communicator.impl.neomedia.codec.video.*;
import net.java.sip.communicator.impl.neomedia.jmfext.media.protocol.*;
/**

@ -67,9 +67,9 @@ public DataSource(MediaLocator locator)
* @param locator the <tt>MediaLocator</tt> to create the new instance from
* @param supportedFormats the list of <tt>Format</tt>s in which the new
* instance is to be capable of capturing audio data
* @param audioQuality <tt>true</tt> if audio quality improvement is to be
* enabled in accord with the preferences of the user or <tt>false</tt> to
* completely disable audio quality improvement
* @param audioQualityImprovement <tt>true</tt> if audio quality improvement
* is to be enabled in accord with the preferences of the user or
* <tt>false</tt> to completely disable audio quality improvement
*/
public DataSource(
MediaLocator locator,

@ -56,8 +56,14 @@ public class PortAudioStream
*/
private int framesPerBuffer;
/**
* Native pointer to a PaStreamParameters object.
*/
private long inputParameters = 0;
/**
* Current sequence number.
*/
private int sequenceNumber = 0;
/**
@ -190,6 +196,7 @@ public void read(Buffer buffer)
*
* @param deviceIndex the device index of the PortAudio device to be read
* through this <tt>PullBufferStream</tt>
* @throws IOException if an input/output error occurs
*/
synchronized void setDeviceIndex(int deviceIndex)
throws IOException

@ -105,8 +105,8 @@ public class QuickTimeStream
* The <tt>Thread</tt> which is to call
* {@link BufferTransferHandler#transferData(PushBufferStream)} for this
* <tt>QuickTimeStream</tt> so that the call is not made in QuickTime/QTKit
* and we can drop late frames when {@link #automaticallyDropsLateFrames} is
* <tt>false</tt>.
* and we can drop late frames when
* {@link #automaticallyDropsLateVideoFrames} is <tt>false</tt>.
*/
private Thread transferDataThread;

@ -32,6 +32,9 @@ public class JAWTRenderer
*/
private static final String PLUGIN_NAME = "JAWT Renderer";
/**
* Array of supported input formats.
*/
private static final Format[] SUPPORTED_INPUT_FORMATS
= new Format[]
{
@ -374,6 +377,7 @@ public synchronized int process(Buffer buffer)
* <tt>offset</tt> which represent the data to be processed and rendered
* @param width the width of the video frame in <tt>data</tt>
* @param height the height of the video frame in <tt>data</tt>
* @return <tt>true</tt> if data has been successfully processed
*/
private static native boolean process(
long handle,

@ -12,8 +12,16 @@
public class PortAudioException
extends Exception
{
/**
* Serial version UID.
*/
private static final long serialVersionUID = 0L;
/**
* Constructs a <tt>PortAudioException</tt> with <tt>message</tt> as
* description.
* @param message description of the exception
*/
public PortAudioException(String message)
{
super(message);

@ -40,7 +40,7 @@ public abstract class BufferStreamAdapter<T extends SourceStream>
* Initializes a new <tt>BufferStreamAdapter</tt> which is to adapt a
* specific <tt>SourceStream</tt> into a <tt>SourceStream</tt> with
* a specific <tt>Format</tt>.
*
*
* @param stream
* @param format
*/
@ -96,7 +96,7 @@ public Object[] getControls()
/**
* Gets the <tt>Format</tt> of the data this stream provides.
*
*
* @return the <tt>Format</tt> of the data this stream provides
*/
public Format getFormat()
@ -117,7 +117,7 @@ public T getStream()
/**
* Reads byte data from this stream into a specific <tt>Buffer</tt>
* which is to use a specific array of bytes for its data.
*
*
* @param buffer the <tt>Buffer</tt> to read byte data into from this
* instance
* @param bytes the array of <tt>byte</tt>s to read data into from this
@ -136,9 +136,9 @@ protected void read(Buffer buffer, byte[] bytes)
buffer.setData(bytes);
buffer.setOffset(offset);
buffer.setLength(numberOfBytesRead);
Format format = getFormat();
if (format != null)
buffer.setFormat(format);
}
@ -148,11 +148,11 @@ protected void read(Buffer buffer, byte[] bytes)
* Reads byte data from this stream into a specific array of
* <tt>byte</tt>s starting the storing at a specific offset and reading
* at most a specific number of bytes.
*
*
* @param buffer the array of <tt>byte</tt>s into which the data read
* from this stream is to be written
* @param offset the offset in the specified <tt>buffer</tt> at which
* writing data read from this stream should start
* writing data read from this stream should start
* @param length the maximum number of bytes to be written into the
* specified <tt>buffer</tt>
* @return the number of bytes read from this stream and written into the

@ -140,6 +140,13 @@ public int getWidth()
*/
private static native int getWidth(long ptr);
/**
* Native copy from native pointer <tt>src</tt> to byte array <tt>dst</tt>.
* @param dst destination array
* @param dstOffset offset of <tt>dst</tt> to copy data to
* @param dstLength length of <tt>dst</tt>
* @param src native pointer source
*/
public static native void memcpy(
byte[] dst, int dstOffset, int dstLength,
long src);

@ -67,7 +67,7 @@ public RTPTransformConnector(StreamConnector connector)
super(connector);
}
/*
/**
* Overrides RTPConnectorImpl#createControlInputStream() to use
* TransformInputStream.
*/
@ -82,7 +82,7 @@ protected TransformInputStream createControlInputStream()
return controlInputStream;
}
/*
/**
* Overrides RTPConnectorImpl#createControlOutputStream() to use
* TransformOutputStream.
*/
@ -97,7 +97,7 @@ protected TransformOutputStream createControlOutputStream()
return controlOutputStream;
}
/*
/**
* Overrides RTPConnectorImpl#createDataInputStream() to use
* TransformInputStream.
*/
@ -112,7 +112,7 @@ protected TransformInputStream createDataInputStream()
return dataInputStream;
}
/*
/**
* Overrides RTPConnectorImpl#createDataOutputStream() to use
* TransformOutputStream.
*/

@ -9,7 +9,7 @@
/**
* SRTPPolicy holds the SRTP encryption / authentication policy of a SRTP
* session.
*
*
* @author Bing SU (nova.su@gmail.com)
*/
public class SRTPPolicy
@ -18,12 +18,12 @@ public class SRTPPolicy
* Null Cipher, does not change the content of RTP payload
*/
public final static int NULL_ENCRYPTION = 0;
/**
* Counter Mode AES Cipher, defined in Section 4.1.1, RFC3711
*/
public final static int AESCM_ENCRYPTION = 1;
/**
* F8 mode AES Cipher, defined in Section 4.1.2, RFC 3711
*/
@ -33,7 +33,7 @@ public class SRTPPolicy
* Null Authentication, no authentication
*/
public final static int NULL_AUTHENTICATION = 0;
/**
* HMAC SHA1 Authentication, defined in Section 4.2.1, RFC3711
*/
@ -43,27 +43,27 @@ public class SRTPPolicy
* SRTP encryption type
*/
private int encType;
/**
* SRTP encryption key length
*/
private int encKeyLength;
/**
* SRTP authentication type
*/
private int authType;
/**
* SRTP authentication key length
*/
private int authKeyLength;
/**
* SRTP authentication tag length
*/
private int authTagLength;
/**
* SRTP salt key length
*/
@ -73,7 +73,7 @@ public class SRTPPolicy
* Construct a SRTPPolicy object based on given parameters.
* This class acts as a storage class, so all the parameters are passed in
* through this constructor.
*
*
* @param encType SRTP encryption type
* @param encKeyLength SRTP encryption key length
* @param authType SRTP authentication type
@ -189,7 +189,7 @@ public int getEncType()
/**
* Set the encryption type
*
* @param encType
* @param encType encryption type
*/
public void setEncType(int encType)
{

@ -12,8 +12,8 @@
* SRTPTransformEngine class implements TransformEngine interface.
* It stores important information / objects regarding SRTP processing.
* Through SRTPTransformEngine, we can get the needed PacketTransformer, which
* will be used by abstract TransformConnector classes.
*
* will be used by abstract TransformConnector classes.
*
* @author Bing SU (nova.su@gmail.com)
*
*/
@ -24,22 +24,22 @@ public class SRTPTransformEngine
* Master key of this SRTP session
*/
private final byte[] masterKey;
/**
* Master salt key of this SRTP session
*/
private final byte[] masterSalt;
/**
* SRTP processing policy
*/
private final SRTPPolicy srtpPolicy;
/**
* SRTCP processing policy
*/
private final SRTPPolicy srtcpPolicy;
/**
* The default SRTPCryptoContext, which will be used to derive other
* contexts.
@ -49,7 +49,7 @@ public class SRTPTransformEngine
/**
* Construct a SRTPTransformEngine based on given master encryption key,
* master salt key and SRTP/SRTCP policy.
*
*
* @param masterKey the master encryption key
* @param masterSalt the master salt key
* @param srtpPolicy SRTP policy
@ -60,19 +60,19 @@ public SRTPTransformEngine(byte[] masterKey, byte[] masterSalt,
{
this.masterKey = new byte[masterKey.length];
System.arraycopy(masterKey, 0, this.masterKey, 0, masterKey.length);
this.masterSalt = new byte[masterSalt.length];
System.arraycopy(masterSalt, 0, this.masterSalt, 0, masterSalt.length);
this.srtpPolicy = srtpPolicy;
this.srtcpPolicy = srtcpPolicy;
this.defaultContext = new SRTPCryptoContext(0, 0, 0,
this.defaultContext = new SRTPCryptoContext(0, 0, 0,
this.masterKey,
this.masterSalt,
this.masterSalt,
this.srtpPolicy);
}
/**
* Gets the <tt>PacketTransformer</tt> for RTCP packets.
*
@ -83,7 +83,8 @@ public PacketTransformer getRTCPTransformer()
return new SRTCPTransformer(this);
}
/* (non-Javadoc)
/*
* (non-Javadoc)
* @see net.java.sip.communicator.impl.media.transform.
* TransformEngine#getRTPTransformer()
*/
@ -101,7 +102,7 @@ public byte[] getMasterKey()
{
return this.masterKey;
}
/**
* Get the master salt key
*

@ -367,7 +367,7 @@ public ZRTPTransformEngine()
/**
* Returns an instance of <tt>ZRTPCTransformer</tt>.
*
*
* @see TransformEngine#getRTCPTransformer()
*/
public PacketTransformer getRTCPTransformer()
@ -377,7 +377,7 @@ public PacketTransformer getRTCPTransformer()
/**
* Returns this RTPTransformer.
*
*
* @see TransformEngine#getRTPTransformer()
*/
public PacketTransformer getRTPTransformer()
@ -405,7 +405,7 @@ public boolean initialize(String zidFilename, ZrtpConfigure config)
* auto-sensing and a default configuration setting.
*
* @param zidFilename The ZID file name
* @param autoEnable If true start with auto-sensing mode.
* @param autoEnable If true start with auto-sensing mode.
* @return true if initialization fails, false if succeeds
*/
public boolean initialize(String zidFilename, boolean autoEnable) {
@ -414,7 +414,7 @@ public boolean initialize(String zidFilename, boolean autoEnable) {
/**
* Default engine initialization method.
*
*
* Calling this for engine initialization and start it with auto-sensing
* and default configuration setting.
*
@ -436,7 +436,7 @@ public boolean initialize(String zidFilename) {
* @param config the zrtp config to use
* @return true if initialization fails, false if succeeds
*/
public synchronized boolean initialize(String zidFilename,
public synchronized boolean initialize(String zidFilename,
boolean autoEnable, ZrtpConfigure config)
{
// Get a reference to the FileAccessService
@ -585,10 +585,10 @@ public void cleanup()
/**
* Set the SSRC of the RTP transmitter stream.
*
*
* ZRTP fills the SSRC in the ZRTP messages.
*
* @param ssrc
*
* @param ssrc SSRC to set
*/
public void setOwnSSRC(long ssrc) {
ownSSRC = (int)(ssrc & 0xffffffff);
@ -597,14 +597,14 @@ public void setOwnSSRC(long ssrc) {
/**
* The data output stream calls this method to transform outgoing
* packets.
*
*
* @see PacketTransformer#transform(RawPacket)
*/
public RawPacket transform(RawPacket pkt)
{
/*
* Never transform outgoing ZRTP (invalid RTP) packets.
*/
*/
if (ZrtpRawPacket.isZrtpData(pkt))
{
return pkt;
@ -629,7 +629,7 @@ public RawPacket transform(RawPacket pkt)
/**
* The input data stream calls this method to transform
* incoming packets.
*
*
* @see PacketTransformer#reverseTransform(RawPacket)
*/
public RawPacket reverseTransform(RawPacket pkt)
@ -654,7 +654,7 @@ public RawPacket reverseTransform(RawPacket pkt)
pkt = srtpInTransformer.reverseTransform(pkt);
// if packet was valid (i.e. not null) and ZRTP engine started and
// in Wait for Confirm2 Ack then emulate a Conf2Ack packet.
// in Wait for Confirm2 Ack then emulate a Conf2Ack packet.
// See ZRTP specification chap. 5.6
if ((pkt != null)
&& started
@ -667,9 +667,9 @@ public RawPacket reverseTransform(RawPacket pkt)
}
/*
* If ZRTP is enabled process it.
*
* In any case return null because ZRTP packets must never reach
* If ZRTP is enabled process it.
*
* In any case return null because ZRTP packets must never reach
* the application.
*/
if (enableZrtp && started)
@ -833,10 +833,10 @@ public boolean srtpSecretsReady(
}
/**
*
*
* @param c
* @param s
* @param verified
* @param verified
* @see gnu.java.zrtp.ZrtpCallback#srtpSecretsOn(java.lang.String,
* java.lang.String, boolean)
*/
@ -855,7 +855,7 @@ public void srtpSecretsOn(String c, String s, boolean verified)
/**
* This method shall clear the ZRTP secrets.
*
*
* @param part Defines for which part (sender or receiver)
* to switch on security
*/
@ -879,7 +879,7 @@ public void srtpSecretsOff(EnableSecurity part)
/**
* Activate timer.
* @param time The time in ms for the timer.
* @param time The time in ms for the timer.
* @return always return 1.
*/
public int activateTimer(int time)
@ -919,8 +919,8 @@ public void handleTimeout()
/**
* Send information messages to the hosting environment.
* @param severity This defines the message's severity
* @param subCode The message code.
* @param severity This defines the message's severity
* @param subCode The message code.
*/
public void sendInfo(ZrtpCodes.MessageSeverity severity, EnumSet<?> subCode)
{
@ -933,7 +933,7 @@ public void sendInfo(ZrtpCodes.MessageSeverity severity, EnumSet<?> subCode)
/**
* Comes a message that zrtp negotiation has failed.
* @param severity This defines the message's severity
* @param subCode The message code.
* @param subCode The message code.
*/
public void zrtpNegotiationFailed(ZrtpCodes.MessageSeverity severity,
EnumSet<?> subCode)
@ -968,8 +968,8 @@ public void zrtpAskEnrollment(String info)
}
/**
*
* @param info
*
* @param info
* @see gnu.java.zrtp.ZrtpCallback#zrtpInformEnrollment(java.lang.String)
*/
public void zrtpInformEnrollment(String info)
@ -981,8 +981,8 @@ public void zrtpInformEnrollment(String info)
}
/**
*
* @param sas
*
* @param sas
* @see gnu.java.zrtp.ZrtpCallback#signSAS(java.lang.String)
*/
public void signSAS(String sas)
@ -994,7 +994,7 @@ public void signSAS(String sas)
}
/**
*
*
* @param sas
* @return false if signature check fails, true otherwise
* @see gnu.java.zrtp.ZrtpCallback#checkSASSignature(java.lang.String)
@ -1070,7 +1070,7 @@ public void requestGoSecure()
*
* @param data The auxiliary secret data
*/
public void setAuxSecret(byte[] data)
public void setAuxSecret(byte[] data)
{
if (zrtpEngine != null)
zrtpEngine.setAuxSecret(data);
@ -1257,8 +1257,8 @@ public SecurityEventManager getUserCallback()
/**
* Get other party's ZID (ZRTP Identifier) data
*
* This functions returns the other party's ZID that was receivied
* during ZRTP processing.
* This function returns the other party's ZID that was received
* during ZRTP processing.
*
* The ZID data can be retrieved after ZRTP receive the first Hello
* packet from the other party. The application may call this method

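ZRTPTransformEngine.transform above passes outgoing ZRTP packets through untouched and only applies SRTP protection to ordinary RTP, while reverseTransform returns null for ZRTP packets so they never reach the application. A minimal sketch of that outgoing branch, with a hypothetical transformer interface standing in for the real SRTP transformer:

public class ZrtpPassThrough
{
    /** Hypothetical stand-in for the SRTP packet transformer of the engine. */
    public interface Transformer
    {
        byte[] protect(byte[] rtpPacket);
    }

    private final Transformer srtpOut;

    public ZrtpPassThrough(Transformer srtpOut)
    {
        this.srtpOut = srtpOut;
    }

    /** Outgoing path: ZRTP packets bypass SRTP, ordinary RTP gets protected. */
    public byte[] transform(byte[] packet, boolean isZrtpData)
    {
        if (isZrtpData)
            return packet; // never encrypt ZRTP handshake packets
        return (srtpOut == null) ? packet : srtpOut.protect(packet);
    }
}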
@ -13,50 +13,50 @@
/**
* ZRTP packet representation.
*
*
* This class extends the RawPacket class and adds some methods
* required by the ZRTP transformer.
*
*
* @author Werner Dittmann <Werner.Dittmann@t-online.de>
*/
public class ZrtpRawPacket extends RawPacket
public class ZrtpRawPacket extends RawPacket
{
/**
* Each ZRTP packet contains this magic number.
*/
public static byte[] zrtpMagic;
static {
zrtpMagic = new byte[4];
zrtpMagic[0]= 0x5a;
zrtpMagic[1]= 0x52;
zrtpMagic[2]= 0x54;
zrtpMagic[0]= 0x5a;
zrtpMagic[1]= 0x52;
zrtpMagic[2]= 0x54;
zrtpMagic[3]= 0x50;
}
/**
* Construct an input ZrtpRawPacket using a received RTP raw packet.
*
* @param pkt a raw RTP packet as received
*
* @param pkt a raw RTP packet as received
*/
public ZrtpRawPacket(RawPacket pkt)
public ZrtpRawPacket(RawPacket pkt)
{
super (pkt.getBuffer(), pkt.getOffset(), pkt.getLength());
}
/**
* Construct an output ZrtpRawPacket using specified value.
*
*
* Initialize this packet and set the ZRTP magic value
* to mark it as a ZRTP packet.
*
*
* @param buf Byte array holding the content of this Packet
* @param off Start offset of packet content inside buffer
* @param len Length of the packet's data
*/
public ZrtpRawPacket(byte[] buf, int off, int len)
public ZrtpRawPacket(byte[] buf, int off, int len)
{
super (buf, off, len);
super (buf, off, len);
writeByte(0, (byte)0x10);
writeByte(1, (byte)0);
@ -69,13 +69,13 @@ public ZrtpRawPacket(byte[] buf, int off, int len)
/**
* Check if it could be a ZRTP packet.
*
*
* The method checks if the first byte of the received data
* matches the defined ZRTP pattern 0x10
*
*
* @return true if could be a ZRTP packet, false otherwise.
*/
protected boolean isZrtpPacket()
protected boolean isZrtpPacket()
{
return isZrtpData(this);
}
@ -99,13 +99,13 @@ static boolean isZrtpData(RawPacket pkt)
/**
* Check if it is really a ZRTP packet.
*
*
* The method checks if the packet contains the ZRTP magic
* number.
*
*
* @return true if packet contains the magic number, false otherwise.
*/
protected boolean hasMagic()
protected boolean hasMagic()
{
return
(readByte(4) == zrtpMagic[0])
@ -116,9 +116,9 @@ protected boolean hasMagic()
/**
* Set the sequence number in this packet.
* @param seq
* @param seq sequence number
*/
protected void setSeqNum(short seq)
protected void setSeqNum(short seq)
{
int at = 2;
writeByte(at++, (byte)(seq>>8));
@ -127,9 +127,9 @@ protected void setSeqNum(short seq)
/**
* Set SSRC in this packet
* @param ssrc
* @param ssrc SSRC to set
*/
protected void setSSRC(int ssrc)
protected void setSSRC(int ssrc)
{
writeInt(8, ssrc);
}
@ -139,7 +139,7 @@ protected void setSSRC(int ssrc)
*
* @return true if the CRC is valid, false otherwise
*/
protected boolean checkCrc()
protected boolean checkCrc()
{
int crc = readInt(getLength()-ZrtpPacketBase.CRC_SIZE);
return ZrtpCrc32.zrtpCheckCksum(getBuffer(), getOffset(),
@ -149,7 +149,7 @@ protected boolean checkCrc()
/**
* Set ZRTP CRC in this packet
*/
protected void setCrc()
protected void setCrc()
{
int crc = ZrtpCrc32.zrtpGenerateCksum(getBuffer(), getOffset(),
getLength() - ZrtpPacketBase.CRC_SIZE);

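ZrtpRawPacket above marks a packet as ZRTP by setting the first header byte to 0x10 and writing the magic bytes 0x5a 0x52 0x54 0x50 ("ZRTP") where an RTP header would carry its timestamp. A minimal sketch of the corresponding checks on a raw buffer:

public final class ZrtpCheck
{
    private static final byte[] ZRTP_MAGIC = { 0x5a, 0x52, 0x54, 0x50 };

    /** Quick test used on every incoming packet: first byte must be 0x10. */
    public static boolean looksLikeZrtp(byte[] buf, int offset)
    {
        return buf[offset] == 0x10;
    }

    /** Full test: the four magic bytes must follow at offset + 4. */
    public static boolean hasMagic(byte[] buf, int offset)
    {
        for (int i = 0; i < ZRTP_MAGIC.length; i++)
            if (buf[offset + 4 + i] != ZRTP_MAGIC[i])
                return false;
        return true;
    }
}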