Work in progress towards (but not yet actually enabling) making the QTKit CaptureDevice on Mac OS X output video in YUV420 — the format consumed by the H.264 encoder — in order to avoid scaling.

cusax-fix
Lyubomir Marinov 16 years ago
parent 887973e954
commit aae756dd56

@ -7,14 +7,27 @@ Java_net_java_sip_communicator_impl_neomedia_quicktime_CVPixelBuffer_getBytes
(JNIEnv *jniEnv, jclass clazz, jlong ptr)
{
CVPixelBufferRef pixelBuffer;
size_t planeCount;
size_t byteCount;
jbyteArray bytes;
pixelBuffer = (CVPixelBufferRef) ptr;
byteCount
= CVPixelBufferGetBytesPerRow(pixelBuffer)
* CVPixelBufferGetHeight(pixelBuffer);
planeCount = CVPixelBufferGetPlaneCount(pixelBuffer);
if (planeCount)
{
size_t planeIndex;
byteCount = 0;
for (planeIndex = 0; planeIndex < planeCount; planeIndex++)
byteCount
+= CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, planeIndex)
* CVPixelBufferGetHeightOfPlane(pixelBuffer, planeIndex);
}
else
byteCount
= CVPixelBufferGetBytesPerRow(pixelBuffer)
* CVPixelBufferGetHeight(pixelBuffer);
bytes = (*jniEnv)->NewByteArray(jniEnv, byteCount);
if (!bytes)
return NULL;
@ -23,8 +36,40 @@ Java_net_java_sip_communicator_impl_neomedia_quicktime_CVPixelBuffer_getBytes
{
jbyte *cBytes;
cBytes = CVPixelBufferGetBaseAddress(pixelBuffer);
(*jniEnv)->SetByteArrayRegion(jniEnv, bytes, 0, byteCount, cBytes);
if (planeCount)
{
size_t byteOffset;
size_t planeIndex;
byteOffset = 0;
for (planeIndex = 0; planeIndex < planeCount; planeIndex++)
{
cBytes
= CVPixelBufferGetBaseAddressOfPlane(
pixelBuffer,
planeIndex);
byteCount
+= CVPixelBufferGetBytesPerRowOfPlane(
pixelBuffer,
planeIndex)
* CVPixelBufferGetHeightOfPlane(
pixelBuffer,
planeIndex);
(*jniEnv)
->SetByteArrayRegion(
jniEnv,
bytes,
byteOffset,
byteCount,
cBytes);
byteOffset += byteCount;
}
}
else
{
cBytes = CVPixelBufferGetBaseAddress(pixelBuffer);
(*jniEnv)->SetByteArrayRegion(jniEnv, bytes, 0, byteCount, cBytes);
}
CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
}
return bytes;

@ -1,7 +1,5 @@
#include "net_java_sip_communicator_impl_neomedia_quicktime_QTCaptureDecompressedVideoOutput.h"
#include <stdio.h>
#import <CoreVideo/CVImageBuffer.h>
#import <Foundation/NSAutoreleasePool.h>
#import <Foundation/NSDictionary.h>

@ -104,17 +104,25 @@ public FormatInfo(VideoFormat format)
Dimension size = format.getSize();
int width = size.width;
double xScale =
(width == preferredWidth)
? 1
: (preferredWidth / (double) width);
int height = size.height;
double yScale =
(height == preferredHeight)
? 1
: (preferredHeight / (double) height);
int width = (size == null) ? 0 : size.width;
double xScale;
if (width == 0)
xScale = Double.POSITIVE_INFINITY;
else if (width == preferredWidth)
xScale = 1;
else
xScale = (preferredWidth / (double) width);
int height = (size == null) ? 0 : size.height;
double yScale;
if (height == 0)
yScale = Double.POSITIVE_INFINITY;
else if (height == preferredHeight)
yScale = 1;
else
yScale = (preferredHeight / (double) height);
difference = Math.abs(1 - Math.min(xScale, yScale));
}
@ -145,6 +153,20 @@ public int compare(FormatInfo info0, FormatInfo info1)
});
selectedFormat = infos[0].format;
}
// if ((selectedFormat != null)
// && (selectedFormat.getSize() == null))
// selectedFormat
// = (VideoFormat)
// selectedFormat
// .intersects(
// new VideoFormat(
// null,
// new Dimension(
// preferredWidth,
// preferredHeight),
// Format.NOT_SPECIFIED,
// null,
// Format.NOT_SPECIFIED));
}
formatControl.setFormat(selectedFormat);
@ -321,7 +343,8 @@ public Component createLocalVisualComponent()
return
(deviceSession instanceof VideoMediaDeviceSession)
? ((VideoMediaDeviceSession) deviceSession).createLocalVisualComponent()
? ((VideoMediaDeviceSession) deviceSession)
.createLocalVisualComponent()
: null;
}
@ -331,10 +354,10 @@ public Component createLocalVisualComponent()
/**
 * Disposes the local visual <tt>Component</tt> of the local peer, if any, by
 * delegating to the <tt>VideoMediaDeviceSession</tt> associated with this
 * instance. Does nothing when the current device session is not a video
 * session.
 */
public void disposeLocalVisualComponent()
{
    MediaDeviceSession deviceSession = getDeviceSession();

    /*
     * Guard the cast with instanceof: performing the cast and the call
     * unconditionally would throw ClassCastException (or NullPointerException)
     * when the session is not a VideoMediaDeviceSession.
     */
    if (deviceSession instanceof VideoMediaDeviceSession)
        ((VideoMediaDeviceSession) deviceSession)
            .disposeLocalVisualComponent();
}
/**

@ -55,7 +55,11 @@ public QuickTimeAuto()
LOCATOR_PROTOCOL
+ ':'
+ inputDevice.uniqueID()),
new Format[] { new RGBFormat() });
new Format[]
{
new RGBFormat(),
//new YUVFormat(YUVFormat.YUV_420)
});
CaptureDeviceManager.addDevice(device);
captureDeviceInfoIsAdded = true;

@ -9,6 +9,8 @@
import java.awt.*;
import java.io.*;
import java.lang.reflect.*;
import java.util.*;
import java.util.List; // disambiguation
import javax.media.*;
import javax.media.control.*;
@ -35,6 +37,18 @@ public class DataSource
*/
private static final Logger logger = Logger.getLogger(DataSource.class);
/**
* The default width of <tt>DataSource</tt> when the associated
* <tt>QTCaptureDevice</tt> does not report the actual width.
*/
static final int DEFAULT_WIDTH = 640;
/**
* The default height of <tt>DataSource</tt> when the associated
* <tt>QTCaptureDevice</tt> does not report the actual height.
*/
static final int DEFAULT_HEIGHT = 480;
/**
* The <tt>QTCaptureSession</tt> which captures from {@link #device} and
* pushes media data to the <tt>PushBufferStream</tt>s of this
@ -48,6 +62,12 @@ public class DataSource
*/
private QTCaptureDevice device;
/**
* The list of <tt>Format</tt>s to be reported by <tt>DataSource</tt>
* instances as supported formats.
*/
private static Format[] supportedFormats;
/**
* Initializes a new <tt>DataSource</tt> instance.
*/
@ -255,6 +275,111 @@ protected Format getFormat(int streamIndex, Format oldValue)
return format;
}
/**
 * Gets the <tt>Format</tt>s to be reported by a <tt>FormatControl</tt> as
 * supported for the <tt>PushBufferStream</tt> at a specific zero-based index
 * in the list of streams of this <tt>PushBufferDataSource</tt>. The generic
 * <tt>Format</tt>s reported by the superclass are refined with specific
 * video sizes.
 *
 * @param streamIndex the zero-based index of the <tt>PushBufferStream</tt>
 * for which the specified <tt>FormatControl</tt> is to report the list of
 * supported <tt>Format</tt>s
 * @return an array of <tt>Format</tt>s to be reported by a
 * <tt>FormatControl</tt> as the supported formats for the
 * <tt>PushBufferStream</tt> at the specified <tt>streamIndex</tt> in the
 * list of streams of this <tt>PushBufferDataSource</tt>
 * @see AbstractPushBufferCaptureDevice#getSupportedFormats(int)
 */
@Override
protected Format[] getSupportedFormats(int streamIndex)
{
    Format[] genericFormats = super.getSupportedFormats(streamIndex);

    return getSupportedFormats(genericFormats);
}
/**
 * Gets a list of <tt>Format</tt>s which are more specific than given
 * <tt>Format</tt>s with respect to video size. The implementation tries to
 * come up with sane video sizes (for example, by looking for codecs which
 * accept the encodings of the specified generic <tt>Format</tt>s and using
 * their sizes if any).
 *
 * @param genericFormats the <tt>Format</tt>s from which more specific are
 * to be derived
 * @return a list of <tt>Format</tt>s which are more specific than the given
 * <tt>Format</tt>s with respect to video size
 */
private static synchronized Format[] getSupportedFormats(
        Format[] genericFormats)
{
    /*
     * The computed list is cached in supportedFormats. An empty result is
     * deliberately not cached so that the computation is retried later,
     * presumably once codecs have been registered with the PlugInManager
     * (TODO confirm against callers).
     */
    if ((supportedFormats != null) && (supportedFormats.length > 0))
        return supportedFormats.clone();

    List<Format> specificFormats = new LinkedList<Format>();

    for (Format genericFormat : genericFormats)
    {
        VideoFormat genericVideoFormat = (VideoFormat) genericFormat;

        if (genericVideoFormat.getSize() == null)
        {
            /*
             * Ask the codecs which accept the encoding of the generic
             * format for the video sizes of their supported input formats
             * and derive specific formats by intersection.
             */
            @SuppressWarnings("unchecked")
            Vector<String> codecs
                = PlugInManager
                    .getPlugInList(
                        new VideoFormat(genericVideoFormat.getEncoding()),
                        null,
                        PlugInManager.CODEC);

            for (String codec : codecs)
            {
                Format[] supportedInputFormats
                    = PlugInManager
                        .getSupportedInputFormats(
                            codec,
                            PlugInManager.CODEC);

                for (Format supportedInputFormat : supportedInputFormats)
                    if (supportedInputFormat instanceof VideoFormat)
                    {
                        Dimension size
                            = ((VideoFormat) supportedInputFormat)
                                .getSize();

                        if (size != null)
                            specificFormats
                                .add(
                                    genericFormat
                                        .intersects(
                                            new VideoFormat(
                                                null,
                                                size,
                                                Format.NOT_SPECIFIED,
                                                null,
                                                Format.NOT_SPECIFIED)));
                    }
            }
        }
        // The generic format itself is always reported as supported too.
        specificFormats.add(genericFormat);
    }
    supportedFormats
        = specificFormats.toArray(new Format[specificFormats.size()]);
    return supportedFormats.clone();
}
/**
* Sets the <tt>QTCaptureDevice</tt> which represents the media source of
* this <tt>DataSource</tt>.

@ -220,13 +220,13 @@ private Format getCaptureOutputFormat()
NSDictionary pixelBufferAttributes
= captureOutput.pixelBufferAttributes();
if ((pixelBufferAttributes != null)
&& (CVPixelFormatType.kCVPixelFormatType_32ARGB
== pixelBufferAttributes
.intForKey(
CVPixelBufferAttributeKey
.kCVPixelBufferPixelFormatTypeKey)))
if (pixelBufferAttributes != null)
{
int pixelFormatType
= pixelBufferAttributes
.intForKey(
CVPixelBufferAttributeKey
.kCVPixelBufferPixelFormatTypeKey);
int width
= pixelBufferAttributes
.intForKey(
@ -236,18 +236,46 @@ private Format getCaptureOutputFormat()
.intForKey(
CVPixelBufferAttributeKey.kCVPixelBufferHeightKey);
return
new RGBFormat(
((width == 0) && (height == 0)
? null
: new Dimension(width, height)),
Format.NOT_SPECIFIED,
Format.byteArray,
Format.NOT_SPECIFIED,
32,
2,
3,
4);
switch (pixelFormatType)
{
case CVPixelFormatType.kCVPixelFormatType_32ARGB:
return
new RGBFormat(
((width == 0) && (height == 0)
? null
: new Dimension(width, height)),
Format.NOT_SPECIFIED,
Format.byteArray,
Format.NOT_SPECIFIED,
32,
2,
3,
4);
case CVPixelFormatType.kCVPixelFormatType_420YpCbCr8Planar:
if ((width == 0) && (height == 0))
return new YUVFormat(YUVFormat.YUV_420);
else
{
int strideY = width;
int strideUV = strideY / 2;
int offsetY = 0;
int offsetU = strideY * height;
int offsetV = offsetU + strideUV * height / 2;
return
new YUVFormat(
new Dimension(width, height),
Format.NOT_SPECIFIED,
Format.byteArray,
Format.NOT_SPECIFIED,
YUVFormat.YUV_420,
strideY,
strideUV,
offsetY,
offsetU,
offsetV);
}
}
}
return null;
}
@ -327,14 +355,17 @@ private void setCaptureOutputFormat(Format format)
{
VideoFormat videoFormat = (VideoFormat) format;
Dimension size = videoFormat.getSize();
System.err.println(format);
/*
* FIXME Mac OS X Leopard does not seem to report the size of the
* QTCaptureDevice in its formatDescriptions early in its creation.
* The workaround presented here is to just force a specific size.
*/
if (size == null)
size = new Dimension(640, 480);
size
= new Dimension(
DataSource.DEFAULT_WIDTH,
DataSource.DEFAULT_HEIGHT);
NSMutableDictionary pixelBufferAttributes = null;
@ -361,6 +392,15 @@ private void setCaptureOutputFormat(Format format)
CVPixelFormatType.kCVPixelFormatType_32ARGB,
CVPixelBufferAttributeKey.kCVPixelBufferPixelFormatTypeKey);
}
else if (format.isSameEncoding(VideoFormat.YUV))
{
if (pixelBufferAttributes == null)
pixelBufferAttributes = new NSMutableDictionary();
pixelBufferAttributes
.setIntForKey(
CVPixelFormatType.kCVPixelFormatType_420YpCbCr8Planar,
CVPixelBufferAttributeKey.kCVPixelBufferPixelFormatTypeKey);
}
else
throw new IllegalArgumentException("format");

@ -7,14 +7,23 @@
package net.java.sip.communicator.impl.neomedia.quicktime;
/**
* Defines the types of <tt>CVPixelBuffer</tt>s to be output by
* <tt>QTCaptureDecompressedVideoOutput</tt>.
*
* @author Lubomir Marinov
*/
public final class CVPixelFormatType
{
/** 24 bit RGB */
public static final int kCVPixelFormatType_24RGB = 0x00000018;
/** 32 bit ARGB */
public static final int kCVPixelFormatType_32ARGB = 0x00000020;
/** Planar Component Y'CbCr 8-bit 4:2:0. */
public static final int kCVPixelFormatType_420YpCbCr8Planar = 0x79343230;
/**
* Prevents the initialization of <tt>CVPixelFormatType</tt> instances.
*/

Loading…
Cancel
Save