Creates less garbage in the QuickTime video CaptureDevice in order to resolve short freezes in the video. Comes at the expense of execution speed due to an additional copying of each frame. The performance penalty is to be addressed in a subsequent commit.

cusax-fix
Lyubomir Marinov 16 years ago
parent a6b3c18e0e
commit c87fd58625

@ -7,14 +7,30 @@
#ifdef __cplusplus
extern "C" {
#endif
/*
* Class: net_java_sip_communicator_impl_neomedia_quicktime_CVPixelBuffer
* Method: getByteCount
* Signature: (J)I
*/
JNIEXPORT jint JNICALL Java_net_java_sip_communicator_impl_neomedia_quicktime_CVPixelBuffer_getByteCount
(JNIEnv *, jclass, jlong);
/*
* Class: net_java_sip_communicator_impl_neomedia_quicktime_CVPixelBuffer
* Method: getBytes
* Signature: (J)[B
*/
JNIEXPORT jbyteArray JNICALL Java_net_java_sip_communicator_impl_neomedia_quicktime_CVPixelBuffer_getBytes
JNIEXPORT jbyteArray JNICALL Java_net_java_sip_communicator_impl_neomedia_quicktime_CVPixelBuffer_getBytes__J
(JNIEnv *, jclass, jlong);
/*
* Class: net_java_sip_communicator_impl_neomedia_quicktime_CVPixelBuffer
* Method: getBytes
* Signature: (JJI)I
*/
JNIEXPORT jint JNICALL Java_net_java_sip_communicator_impl_neomedia_quicktime_CVPixelBuffer_getBytes__JJI
(JNIEnv *, jclass, jlong, jlong, jint);
/*
* Class: net_java_sip_communicator_impl_neomedia_quicktime_CVPixelBuffer
* Method: getHeight
@ -31,6 +47,14 @@ JNIEXPORT jint JNICALL Java_net_java_sip_communicator_impl_neomedia_quicktime_CV
JNIEXPORT jint JNICALL Java_net_java_sip_communicator_impl_neomedia_quicktime_CVPixelBuffer_getWidth
(JNIEnv *, jclass, jlong);
/*
* Class: net_java_sip_communicator_impl_neomedia_quicktime_CVPixelBuffer
* Method: memcpy
* Signature: ([BIIJ)V
*/
JNIEXPORT void JNICALL Java_net_java_sip_communicator_impl_neomedia_quicktime_CVPixelBuffer_memcpy
(JNIEnv *, jclass, jbyteArray, jint, jint, jlong);
#ifdef __cplusplus
}
#endif

@ -1,33 +1,60 @@
#include "net_java_sip_communicator_impl_neomedia_quicktime_CVPixelBuffer.h"

#include <stdint.h> /* intptr_t */
#include <string.h>

#import <CoreVideo/CVPixelBuffer.h>
JNIEXPORT jbyteArray JNICALL
Java_net_java_sip_communicator_impl_neomedia_quicktime_CVPixelBuffer_getBytes
(JNIEnv *jniEnv, jclass clazz, jlong ptr)
/*
 * Computes the number of bytes which represent the pixels of a specific
 * CoreVideo CVPixelBufferRef.
 *
 * @param pixelBuffer the CVPixelBufferRef to get the byte count of
 * @param planeCount the number of planes of pixelBuffer (0 when the pixel
 * buffer is non-planar)
 * @return the number of bytes which represent the pixels of pixelBuffer
 */
static size_t
CVPixelBuffer_getByteCount(CVPixelBufferRef pixelBuffer, size_t planeCount)
{
    size_t byteCount;

    if (planeCount)
    {
        /* Planar: sum the sizes of the individual planes. */
        size_t planeIndex;

        byteCount = 0;
        for (planeIndex = 0; planeIndex < planeCount; planeIndex++)
        {
            byteCount
                += CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, planeIndex)
                    * CVPixelBufferGetHeightOfPlane(pixelBuffer, planeIndex);
        }
    }
    else
    {
        /* Non-planar: a single plane of bytesPerRow * height bytes. */
        byteCount
            = CVPixelBufferGetBytesPerRow(pixelBuffer)
                * CVPixelBufferGetHeight(pixelBuffer);
    }
    return byteCount;
}
/*
 * Class:     net_java_sip_communicator_impl_neomedia_quicktime_CVPixelBuffer
 * Method:    getByteCount
 * Signature: (J)I
 *
 * Returns the number of bytes which represent the pixels of the
 * CVPixelBufferRef at ptr.
 */
JNIEXPORT jint JNICALL
Java_net_java_sip_communicator_impl_neomedia_quicktime_CVPixelBuffer_getByteCount
    (JNIEnv *jniEnv, jclass clazz, jlong ptr)
{
    CVPixelBufferRef pixelBuffer = (CVPixelBufferRef) ptr;

    return
        (jint)
            CVPixelBuffer_getByteCount(
                pixelBuffer,
                CVPixelBufferGetPlaneCount(pixelBuffer));
}
JNIEXPORT jbyteArray JNICALL
Java_net_java_sip_communicator_impl_neomedia_quicktime_CVPixelBuffer_getBytes__J
(JNIEnv *jniEnv, jclass clazz, jlong ptr)
{
CVPixelBufferRef pixelBuffer;
size_t planeCount;
size_t byteCount;
jbyteArray bytes;
pixelBuffer = (CVPixelBufferRef) ptr;
planeCount = CVPixelBufferGetPlaneCount(pixelBuffer);
byteCount = CVPixelBuffer_getByteCount(pixelBuffer, planeCount);
bytes = (*jniEnv)->NewByteArray(jniEnv, byteCount);
if (!bytes)
return NULL;
@ -75,6 +102,59 @@ Java_net_java_sip_communicator_impl_neomedia_quicktime_CVPixelBuffer_getBytes
return bytes;
}
/*
 * Class:     net_java_sip_communicator_impl_neomedia_quicktime_CVPixelBuffer
 * Method:    getBytes
 * Signature: (JJI)I
 *
 * Copies the pixel bytes of the CVPixelBufferRef at ptr into the native
 * buffer at buf (with a capacity of bufLength bytes) and returns the number
 * of bytes written, or 0 on failure (lock failure or insufficient capacity).
 */
JNIEXPORT jint JNICALL
Java_net_java_sip_communicator_impl_neomedia_quicktime_CVPixelBuffer_getBytes__JJI
    (JNIEnv *jniEnv, jclass clazz, jlong ptr, jlong buf, jint bufLength)
{
    CVPixelBufferRef pixelBuffer;
    size_t byteCount;

    pixelBuffer = (CVPixelBufferRef) ptr;
    if (kCVReturnSuccess == CVPixelBufferLockBaseAddress(pixelBuffer, 0))
    {
        size_t planeCount;
        jbyte *dst = (jbyte *) (intptr_t) buf;

        planeCount = CVPixelBufferGetPlaneCount(pixelBuffer);
        byteCount = CVPixelBuffer_getByteCount(pixelBuffer, planeCount);
        if ((bufLength < 0) || (byteCount > (size_t) bufLength))
        {
            /* The destination cannot receive the frame; do not overrun it. */
            byteCount = 0;
        }
        else if (planeCount)
        {
            size_t byteOffset = 0;
            size_t planeIndex;

            /*
             * Copy the planes one after the other, each at its own offset in
             * dst. (The previous revision copied every plane to the start of
             * dst and grew the already-total byteCount per iteration, which
             * overran the destination and kept only the last plane.)
             */
            for (planeIndex = 0; planeIndex < planeCount; planeIndex++)
            {
                size_t planeByteCount
                    = CVPixelBufferGetBytesPerRowOfPlane(
                            pixelBuffer,
                            planeIndex)
                        * CVPixelBufferGetHeightOfPlane(
                                pixelBuffer,
                                planeIndex);

                memcpy(
                    dst + byteOffset,
                    CVPixelBufferGetBaseAddressOfPlane(
                        pixelBuffer,
                        planeIndex),
                    planeByteCount);
                byteOffset += planeByteCount;
            }
            byteCount = byteOffset;
        }
        else
            memcpy(dst, CVPixelBufferGetBaseAddress(pixelBuffer), byteCount);
        CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
    }
    else
        byteCount = 0;
    return (jint) byteCount;
}
JNIEXPORT jint JNICALL
Java_net_java_sip_communicator_impl_neomedia_quicktime_CVPixelBuffer_getHeight
(JNIEnv *jniEnv, jclass clazz, jlong ptr)
@ -88,3 +168,15 @@ Java_net_java_sip_communicator_impl_neomedia_quicktime_CVPixelBuffer_getWidth
{
return (jint) CVPixelBufferGetWidth((CVPixelBufferRef) ptr);
}
/*
 * Class:     net_java_sip_communicator_impl_neomedia_quicktime_CVPixelBuffer
 * Method:    memcpy
 * Signature: ([BIIJ)V
 *
 * Copies dstLength bytes of native memory starting at src into the Java
 * byte array dst beginning at index dstOffset.
 */
JNIEXPORT void JNICALL
Java_net_java_sip_communicator_impl_neomedia_quicktime_CVPixelBuffer_memcpy
    (JNIEnv *jniEnv, jclass clazz,
    jbyteArray dst, jint dstOffset, jint dstLength,
    jlong src)
{
    jbyte *srcBytes = (jbyte *) src;

    /* SetByteArrayRegion performs the bounds checking on dst for us. */
    (*jniEnv)->SetByteArrayRegion(jniEnv, dst, dstOffset, dstLength, srcBytes);
}

@ -19,12 +19,19 @@
* other <tt>VideoFormat</tt>s and a very obvious one.
*
* @author Lubomir Marinov
* @author Sebastien Vincent
*/
public class AVFrameFormat
extends VideoFormat
{
/**
* Native FFMPEG format used.
* The encoding of the <tt>AVFrameFormat</tt> instances.
*/
public static final String AVFRAME = "AVFrame";
/**
* The native FFmpeg format represented by this instance.
*/
private int pixFmt;
@ -40,10 +47,13 @@ public AVFrameFormat()
/**
* Initializes a new <tt>AVFrameFormat</tt> instance with specific size and
* frame rate.
*
* @param size the <tt>Dimension</tt> of the new instance
* @param frameRate the frame rate of the new instance
*/
public AVFrameFormat(Dimension size, float frameRate)
{
super("AVFrame", size, NOT_SPECIFIED, AVFrame.class, frameRate);
super(AVFRAME, size, NOT_SPECIFIED, AVFrame.class, frameRate);
this.pixFmt = FFmpeg.PIX_FMT_YUV420P;
}
@ -65,9 +75,11 @@ public Object clone()
}
/**
* Copy specified <tt>Format</tt>.
* Copies the properties of the specified <tt>Format</tt> into this
* instance.
*
* @param f <tt>Format</tt> to be copied
* @param f the <tt>Format</tt> the properties of which are to be copied
* into this instance
*/
@Override
protected void copy(Format f)
@ -106,9 +118,9 @@ public boolean equals(Object obj)
}
/**
* Get the native FFMPEG format.
* Gets the native FFmpeg format represented by this instance.
*
* @return native format
* @return the native FFmpeg format represented by this instance
*/
public int getPixFmt()
{

@ -9,6 +9,7 @@
import javax.media.*;
import javax.media.format.*;
import net.java.sip.communicator.impl.neomedia.codec.video.*;
import net.java.sip.communicator.impl.neomedia.quicktime.*;
import net.java.sip.communicator.util.*;
@ -57,6 +58,7 @@ public QuickTimeAuto()
+ inputDevice.uniqueID()),
new Format[]
{
//new AVFrameFormat(),
new RGBFormat(),
//new YUVFormat(YUVFormat.YUV_420)
});

@ -6,16 +6,19 @@
*/
package net.java.sip.communicator.impl.neomedia.jmfext.media.protocol.quicktime;
import java.awt.*;
import java.awt.Dimension; // disambiguation
import java.io.*;
import java.util.*;
import javax.media.*;
import javax.media.control.*;
import javax.media.format.*;
import javax.media.protocol.*;
import net.java.sip.communicator.impl.neomedia.codec.video.*;
import net.java.sip.communicator.impl.neomedia.jmfext.media.protocol.*;
import net.java.sip.communicator.impl.neomedia.quicktime.*;
import net.java.sip.communicator.util.*;
/**
* Implements a <tt>PushBufferStream</tt> using QuickTime/QTKit.
@ -26,6 +29,13 @@ public class QuickTimeStream
extends AbstractPushBufferStream
{
/**
* The <tt>Logger</tt> used by the <tt>QuickTimeStream</tt> class and its
* instances for logging output.
*/
private static final Logger logger
= Logger.getLogger(QuickTimeStream.class);
/**
* The indicator which determines whether {@link #captureOutput}
* automatically drops late frames. If <tt>false</tt>, we have to drop them
@ -34,16 +44,36 @@ public class QuickTimeStream
*/
private final boolean automaticallyDropsLateVideoFrames;
/**
* The pool of <tt>ByteBuffer</tt>s this instance is using to transfer the
* media data captured by {@link #captureOutput} out of this instance
* through the <tt>Buffer</tt>s specified in its {@link #process(Buffer)}.
*/
private final List<ByteBuffer> buffers = new ArrayList<ByteBuffer>();
/**
* The <tt>QTCaptureOutput</tt> represented by this <tt>SourceStream</tt>.
*/
final QTCaptureDecompressedVideoOutput captureOutput
= new QTCaptureDecompressedVideoOutput();
/**
* The <tt>VideoFormat</tt> which has been successfully set on
* {@link #captureOutput}.
*/
private VideoFormat captureOutputFormat;
/**
* The indicator which determines whether this <tt>QuickTimeStream</tt> has
* been closed. Introduced to determine when <tt>ByteBuffer</tt>s are to be
* disposed of and no longer be pooled in {@link #buffers}.
*/
private boolean closed = false;
/**
* The captured media data to be returned in {@link #read(Buffer)}.
*/
private byte[] data;
private ByteBuffer data;
/**
* The <tt>Format</tt> of {@link #data} if known. If possible, determined by
@ -75,7 +105,7 @@ public class QuickTimeStream
* <tt>QuickTimeStream</tt> to provide the latest available frame and not
* wait for QuickTime/QTKit to capture a new one.
*/
private byte[] nextData;
private ByteBuffer nextData;
/**
* The <tt>Format</tt> of {@link #nextData} if known.
@ -172,18 +202,45 @@ private void captureOutputDidOutputVideoFrameWithSampleBuffer(
{
if (!automaticallyDropsLateVideoFrames && (data != null))
{
nextData = pixelBuffer.getBytes();
nextDataTimeStamp = System.nanoTime();
if (nextDataFormat == null)
nextDataFormat = getVideoFrameFormat(pixelBuffer);
if (nextData != null)
{
returnFreeBuffer(nextData);
nextData = null;
}
nextData = getFreeBuffer(pixelBuffer.getByteCount());
if (nextData != null)
{
nextData.setLength(
pixelBuffer.getBytes(
nextData.ptr,
nextData.capacity));
nextDataTimeStamp = System.nanoTime();
if (nextDataFormat == null)
nextDataFormat = getVideoFrameFormat(pixelBuffer);
}
return;
}
data = pixelBuffer.getBytes();
dataTimeStamp = System.nanoTime();
if (dataFormat == null)
dataFormat = getVideoFrameFormat(pixelBuffer);
nextData = null;
if (data != null)
{
returnFreeBuffer(data);
data = null;
}
data = getFreeBuffer(pixelBuffer.getByteCount());
if (data != null)
{
data.setLength(pixelBuffer.getBytes(data.ptr, data.capacity));
dataTimeStamp = System.nanoTime();
if (dataFormat == null)
dataFormat = getVideoFrameFormat(pixelBuffer);
}
if (nextData != null)
{
returnFreeBuffer(nextData);
nextData = null;
}
if (automaticallyDropsLateVideoFrames)
transferData = (data != null);
@ -216,6 +273,32 @@ public void close()
super.close();
captureOutput.setDelegate(null);
synchronized (buffers)
{
closed = true;
Iterator<ByteBuffer> bufferIter = buffers.iterator();
boolean loggerIsTraceEnabled = logger.isTraceEnabled();
int leakedCount = 0;
while (bufferIter.hasNext())
{
ByteBuffer buffer = bufferIter.next();
if (buffer.isFree())
{
bufferIter.remove();
FFmpeg.av_free(buffer.ptr);
} else if (loggerIsTraceEnabled)
leakedCount++;
}
if (loggerIsTraceEnabled)
{
logger.trace(
"Leaking " + leakedCount + " ByteBuffer instances.");
}
}
}
/**
@ -250,7 +333,9 @@ protected Format doGetFormat()
.intersects(
new VideoFormat(
null,
new Dimension(640, 480),
new Dimension(
DataSource.DEFAULT_WIDTH,
DataSource.DEFAULT_HEIGHT),
Format.NOT_SPECIFIED,
Format.byteArray,
Format.NOT_SPECIFIED));
@ -304,7 +389,19 @@ private Format getCaptureOutputFormat()
2, 3, 4);
case CVPixelFormatType.kCVPixelFormatType_420YpCbCr8Planar:
if ((width == 0) && (height == 0))
return new YUVFormat(YUVFormat.YUV_420);
{
if (captureOutputFormat instanceof AVFrameFormat)
return new AVFrameFormat();
else
return new YUVFormat(YUVFormat.YUV_420);
}
else if (captureOutputFormat instanceof AVFrameFormat)
{
return
new AVFrameFormat(
new Dimension(width, height),
Format.NOT_SPECIFIED);
}
else
{
int strideY = width;
@ -328,6 +425,48 @@ private Format getCaptureOutputFormat()
return null;
}
/**
 * Gets a free <tt>ByteBuffer</tt> from the {@link #buffers} pool which is
 * capable of receiving at least <tt>capacity</tt> bytes of captured media
 * data, allocating and pooling a new one if no pooled instance fits.
 *
 * @param capacity the minimal number of bytes that the returned
 * <tt>ByteBuffer</tt> is to be capable of receiving
 * @return a <tt>ByteBuffer</tt> marked as in-use and ready for writing
 * captured media data into or <tt>null</tt> if this stream has been closed
 */
private ByteBuffer getFreeBuffer(int capacity)
{
    synchronized (buffers)
    {
        // Once closed, no new captured data is to be pooled.
        if (closed)
            return null;

        for (ByteBuffer buffer : buffers)
        {
            if (buffer.isFree() && (buffer.capacity >= capacity))
            {
                buffer.setFree(false);
                return buffer;
            }
        }

        // No pooled ByteBuffer fits so allocate (and pool) a new one.
        ByteBuffer newBuffer = new ByteBuffer(capacity);

        buffers.add(newBuffer);
        newBuffer.setFree(false);
        return newBuffer;
    }
}
/**
* Gets the <tt>Format</tt> of the media data made available by this
* <tt>PushBufferStream</tt> as indicated by a specific
@ -378,15 +517,32 @@ public void read(Buffer buffer)
buffer.setLength(0);
else
{
buffer.setData(data);
buffer
.setFlags(Buffer.FLAG_LIVE_DATA | Buffer.FLAG_SYSTEM_TIME);
Object bufferData = buffer.getData();
byte[] bufferByteData = null;
int dataLength = data.getLength();
if (bufferData instanceof byte[])
{
bufferByteData = (byte[]) bufferData;
if (bufferByteData.length < dataLength)
bufferByteData = null;
}
if (bufferByteData == null)
{
bufferByteData = new byte[dataLength];
buffer.setData(bufferByteData);
}
CVPixelBuffer.memcpy(bufferByteData, 0, dataLength, data.ptr);
buffer.setFlags(
Buffer.FLAG_LIVE_DATA | Buffer.FLAG_SYSTEM_TIME);
if (dataFormat != null)
buffer.setFormat(dataFormat);
buffer.setLength(data.length);
buffer.setLength(dataLength);
buffer.setOffset(0);
buffer.setTimeStamp(dataTimeStamp);
returnFreeBuffer(data);
data = null;
if (!automaticallyDropsLateVideoFrames)
@ -395,6 +551,24 @@ public void read(Buffer buffer)
}
}
/**
 * Returns a specific <tt>ByteBuffer</tt> into the pool of free
 * <tt>ByteBuffer</tt>s so that it may be reused for writing subsequently
 * captured media data into.
 *
 * @param buffer the <tt>ByteBuffer</tt> to be returned into the pool of
 * free <tt>ByteBuffer</tt>s
 */
private void returnFreeBuffer(ByteBuffer buffer)
{
    synchronized (buffers)
    {
        buffer.setFree(true);

        // After this stream has been closed, the pool is being drained so
        // dispose of the native memory of any buffer returned to it.
        if (closed)
        {
            if (buffers.remove(buffer))
                FFmpeg.av_free(buffer.ptr);
        }
    }
}
/**
* Calls {@link BufferTransferHandler#transferData(PushBufferStream)} from
* inside {@link #transferDataThread} so that the call is not made in
@ -415,6 +589,12 @@ private void runInTransferDataThread()
synchronized (dataSyncRoot)
{
if (data != null)
{
returnFreeBuffer(data);
data = null;
}
data = nextData;
dataTimeStamp = nextDataTimeStamp;
if (dataFormat == null)
@ -457,7 +637,7 @@ private void runInTransferDataThread()
}
/**
* Set the <tt>Format</tt> of the media data made available by this
* Sets the <tt>Format</tt> of the media data made available by this
* <tt>PushBufferStream</tt> to {@link #captureOutput}.
*
* @param format the <tt>Format</tt> of the media data made available by
@ -502,7 +682,25 @@ private void setCaptureOutputFormat(Format format)
CVPixelBufferAttributeKey.kCVPixelBufferHeightKey);
}
if (format.isSameEncoding(VideoFormat.RGB))
String encoding;
if (format instanceof AVFrameFormat)
{
int pixfmt = ((AVFrameFormat) format).getPixFmt();
if (pixfmt == FFmpeg.PIX_FMT_YUV420P)
encoding = VideoFormat.YUV;
else
encoding = null;
}
else if (format.isSameEncoding(VideoFormat.RGB))
encoding = VideoFormat.RGB;
else if (format.isSameEncoding(VideoFormat.YUV))
encoding = VideoFormat.YUV;
else
encoding = null;
if (VideoFormat.RGB.equalsIgnoreCase(encoding))
{
if (pixelBufferAttributes == null)
pixelBufferAttributes = new NSMutableDictionary();
@ -511,7 +709,7 @@ private void setCaptureOutputFormat(Format format)
CVPixelFormatType.kCVPixelFormatType_32ARGB,
CVPixelBufferAttributeKey.kCVPixelBufferPixelFormatTypeKey);
}
else if (format.isSameEncoding(VideoFormat.YUV))
else if (VideoFormat.YUV.equalsIgnoreCase(encoding))
{
if (pixelBufferAttributes == null)
pixelBufferAttributes = new NSMutableDictionary();
@ -524,7 +722,10 @@ else if (format.isSameEncoding(VideoFormat.YUV))
throw new IllegalArgumentException("format");
if (pixelBufferAttributes != null)
{
captureOutput.setPixelBufferAttributes(pixelBufferAttributes);
captureOutputFormat = videoFormat;
}
}
/**
@ -566,13 +767,128 @@ public void stop()
synchronized (dataSyncRoot)
{
data = null;
if (data != null)
{
returnFreeBuffer(data);
data = null;
}
dataFormat = null;
nextData = null;
if (nextData != null)
{
returnFreeBuffer(nextData);
nextData = null;
}
nextDataFormat = null;
if (!automaticallyDropsLateVideoFrames)
dataSyncRoot.notifyAll();
}
}
/**
 * Represents a buffer of native memory with a specific size/capacity which
 * either contains a specific number of bytes of valid data or is free for
 * consumption.
 */
private static class ByteBuffer
{
    /**
     * The maximum number of bytes which can be written into the native
     * memory represented by this instance.
     */
    public final int capacity;

    /**
     * The indicator which determines whether this instance is free to be
     * written bytes into.
     */
    private boolean free;

    /**
     * The number of bytes of valid data currently held in the native
     * memory represented by this instance.
     */
    private int length;

    /**
     * The pointer to the native memory represented by this instance.
     */
    public final long ptr;

    /**
     * Initializes a new <tt>ByteBuffer</tt> instance which represents a
     * newly-allocated block of native memory with a specific
     * <tt>capacity</tt>. The new instance starts out free and empty.
     *
     * @param capacity the maximum number of bytes which can be written
     * into the native memory represented by the new instance
     * @throws OutOfMemoryError if the native memory cannot be allocated
     */
    public ByteBuffer(int capacity)
    {
        long ptr = FFmpeg.av_malloc(capacity);

        if (ptr == 0)
        {
            throw
                new OutOfMemoryError(
                        getClass().getSimpleName()
                            + " with capacity "
                            + capacity);
        }

        this.capacity = capacity;
        this.ptr = ptr;
        this.free = true;
        this.length = 0;
    }

    /**
     * Gets the number of bytes of valid data that the native memory
     * represented by this instance contains.
     *
     * @return the number of bytes of valid data that the native memory
     * represented by this instance contains
     */
    public int getLength()
    {
        return length;
    }

    /**
     * Determines whether this instance is free to be written bytes into.
     *
     * @return <tt>true</tt> if this instance is free to be written bytes
     * into; <tt>false</tt> if the native memory represented by this
     * instance is already in use
     */
    public boolean isFree()
    {
        return free;
    }

    /**
     * Sets the indicator which determines whether this instance is free
     * to be written bytes into. Marking this instance as free also resets
     * its length to zero.
     *
     * @param free <tt>true</tt> if this instance is to be made available
     * for writing bytes into; otherwise, <tt>false</tt>
     */
    public void setFree(boolean free)
    {
        this.free = free;
        if (free)
            setLength(0);
    }

    /**
     * Sets the number of bytes of valid data that the native memory
     * represented by this instance contains.
     *
     * @param length the number of bytes of valid data that the native
     * memory represented by this instance contains
     */
    public void setLength(int length)
    {
        this.length = length;
    }
}
}

@ -27,6 +27,29 @@ public CVPixelBuffer(long ptr)
super(ptr);
}
/**
 * Gets the number of bytes which represent the pixels of the associated
 * CoreVideo <tt>CVPixelBufferRef</tt>.
 *
 * @return the number of bytes which represent the pixels of the associated
 * CoreVideo <tt>CVPixelBufferRef</tt>
 */
public int getByteCount()
{
    long ptr = getPtr();

    return getByteCount(ptr);
}
/**
* Gets the number of bytes which represent the pixels of a specific
* CoreVideo <tt>CVPixelBufferRef</tt>.
*
* @param ptr the <tt>CVPixelBufferRef</tt> whose pixel byte count is to be
* returned
* @return the number of bytes which represent the pixels of the specified
* CoreVideo <tt>CVPixelBufferRef</tt>
*/
private static native int getByteCount(long ptr);
/**
* Gets a <tt>byte</tt> array which represents the pixels of the associated
* CoreVideo <tt>CVPixelBufferRef</tt>.
@ -49,6 +72,32 @@ public byte[] getBytes()
*/
private static native byte[] getBytes(long ptr);
/**
 * Gets the bytes which represent the pixels of the associated
 * <tt>CVPixelBufferRef</tt> into a specific native byte buffer with a
 * specific capacity.
 *
 * @param buf the native byte buffer to return the bytes into
 * @param bufLength the capacity in bytes of <tt>buf</tt>
 * @return the number of bytes written into <tt>buf</tt>
 */
public int getBytes(long buf, int bufLength)
{
    long ptr = getPtr();

    return getBytes(ptr, buf, bufLength);
}
/**
* Gets the bytes which represent the pixels of a specific
* <tt>CVPixelBufferRef</tt> into a specific native byte buffer with a
* specific capacity.
*
* @param ptr the <tt>CVPixelBufferRef</tt> to get the bytes of
* @param buf the native byte buffer to return the bytes into
* @param bufLength the capacity in bytes of <tt>buf</tt>
* @return the number of bytes written into <tt>buf</tt>
*/
private static native int getBytes(long ptr, long buf, int bufLength);
/**
* Gets the height in pixels of this <tt>CVPixelBuffer</tt>.
*
@ -90,4 +139,8 @@ public int getWidth()
* <tt>CVPixelBufferRef</tt>
*/
private static native int getWidth(long ptr);
/**
 * Copies <tt>dstLength</tt> bytes of native memory starting at the address
 * <tt>src</tt> into the Java byte array <tt>dst</tt> beginning at index
 * <tt>dstOffset</tt>.
 *
 * @param dst the Java byte array to copy the native memory into
 * @param dstOffset the offset in <tt>dst</tt> at which the copying is to
 * begin
 * @param dstLength the number of bytes to be copied
 * @param src the pointer to the native memory to copy from
 */
public static native void memcpy(
byte[] dst, int dstOffset, int dstLength,
long src);
}

Loading…
Cancel
Save