Implements a video CaptureDevice on Linux using the Video for Linux Two API Specification.

cusax-fix
Lyubomir Marinov 16 years ago
parent ed3f10a78b
commit 5d9691ed22

@ -0,0 +1,15 @@
JAVA_HOME?=/usr/lib/jvm/java-6-sun
ARCH=$(shell uname -m | sed -e s/x86_64/-64/ -e s/i.86//)
TARGET=../../../../lib/native/linux$(ARCH)/libjvideo4linux2.so
CC=gcc
CPPFLAGS=-DJNI_IMPLEMENTATION \
-fPIC \
-Wall -Wreturn-type \
-I$(JAVA_HOME)/include -I$(JAVA_HOME)/include/linux
LDFLAGS=-shared
LIBS=
$(TARGET): net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2.c net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2.h
$(CC) $(CPPFLAGS) $< $(LDFLAGS) -o $@ $(LIBS)

@ -0,0 +1,357 @@
/*
* SIP Communicator, the OpenSource Java VoIP and Instant Messaging client.
*
* Distributable under LGPL license.
* See terms of license at gnu.org.
*/
#include "net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2.h"
#include <fcntl.h>
#include <linux/videodev2.h>
#include <stdlib.h>
#include <string.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <unistd.h>
JNIEXPORT jint JNICALL
Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_close
(JNIEnv *jniEnv, jclass clazz, jint fd)
{
return close(fd);
}
JNIEXPORT void JNICALL
Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_free
(JNIEnv *jniEnv, jclass clazz, jlong ptr)
{
free((void *) ptr);
}
JNIEXPORT jint JNICALL
Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_ioctl
(JNIEnv *jniEnv, jclass clazz, jint fd, jint request, jlong argp)
{
return ioctl(fd, request, (void *) argp);
}
JNIEXPORT jlong JNICALL
Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_memcpy
(JNIEnv *jniEnv, jclass clazz, jlong dest, jlong src, jint n)
{
return (jlong) memcpy((void *) dest, (const void *) src, n);
}
JNIEXPORT jlong JNICALL
Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_mmap
(JNIEnv *jniEnv, jclass clazz, jlong start, jint length, jint prot,
jint flags, jint fd, jlong offset)
{
return (jlong) mmap((void *) start, length, prot, flags, fd, offset);
}
JNIEXPORT jint JNICALL
Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_munmap
(JNIEnv *jniEnv, jclass clazz, jlong start, jint length)
{
return munmap((void *) start, length);
}
JNIEXPORT jint JNICALL
Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_open
(JNIEnv *jniEnv, jclass clazz, jstring deviceName, jint flags)
{
const char *deviceNameChars;
jint fd;
deviceNameChars
= (const char *) (*jniEnv)->GetStringUTFChars(jniEnv, deviceName, NULL);
if (deviceNameChars)
{
fd = open(deviceNameChars, flags);
(*jniEnv)->ReleaseStringUTFChars(jniEnv, deviceName, deviceNameChars);
}
else
fd = -1;
return fd;
}
JNIEXPORT jlong JNICALL
Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_v4l2_1buffer_1alloc
(JNIEnv *jniEnv, jclass clazz, jint type)
{
struct v4l2_buffer *v4l2_buffer;
v4l2_buffer = malloc(sizeof(struct v4l2_buffer));
if (v4l2_buffer)
v4l2_buffer->type = type;
return (jlong) v4l2_buffer;
}
JNIEXPORT jint JNICALL
Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_v4l2_1buffer_1getBytesused
(JNIEnv *jniEnv, jclass clazz, jlong v4l2_buffer)
{
return ((struct v4l2_buffer *) v4l2_buffer)->bytesused;
}
JNIEXPORT jint JNICALL
Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_v4l2_1buffer_1getIndex
(JNIEnv *jniEnv, jclass clazz, jlong v4l2_buffer)
{
return ((struct v4l2_buffer *) v4l2_buffer)->index;
}
JNIEXPORT jint JNICALL
Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_v4l2_1buffer_1getLength
(JNIEnv *jniEnv, jclass clazz, jlong v4l2_buffer)
{
return ((struct v4l2_buffer *) v4l2_buffer)->length;
}
JNIEXPORT jlong JNICALL
Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_v4l2_1buffer_1getMOffset
(JNIEnv *jniEnv, jclass clazz, jlong v4l2_buffer)
{
return ((struct v4l2_buffer *) v4l2_buffer)->m.offset;
}
JNIEXPORT void JNICALL
Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_v4l2_1buffer_1setIndex
(JNIEnv *jniEnv, jclass clazz, jlong v4l2_buffer, jint index)
{
((struct v4l2_buffer *) v4l2_buffer)->index = index;
}
JNIEXPORT void JNICALL
Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_v4l2_1buffer_1setMemory
(JNIEnv *jniEnv, jclass clazz, jlong v4l2_buffer, jint memory)
{
((struct v4l2_buffer *) v4l2_buffer)->memory = memory;
}
JNIEXPORT jlong JNICALL
Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_v4l2_1buf_1type_1alloc
(JNIEnv *jniEnv, jclass clazz, jint type)
{
enum v4l2_buf_type *v4l2_buf_type;
v4l2_buf_type = malloc(sizeof(enum v4l2_buf_type));
if (v4l2_buf_type)
(*v4l2_buf_type) = type;
return (jlong) v4l2_buf_type;
}
JNIEXPORT jlong JNICALL
Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_v4l2_1capability_1alloc
(JNIEnv *jniEnv, jclass clazz)
{
return (jlong) malloc(sizeof(struct v4l2_capability));
}
JNIEXPORT jint JNICALL
Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_v4l2_1capability_1getCapabilities
(JNIEnv *jniEnv, jclass clazz, jlong v4l2_capability)
{
return ((struct v4l2_capability *) v4l2_capability)->capabilities;
}
JNIEXPORT jstring JNICALL
Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_v4l2_1capability_1getCard
(JNIEnv *jniEnv, jclass clazz, jlong v4l2_capability)
{
return
(*jniEnv)->NewStringUTF(
jniEnv,
(const char *)
(((struct v4l2_capability *) v4l2_capability)->card));
}
JNIEXPORT jlong JNICALL
Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_v4l2_1format_1alloc
(JNIEnv *jniEnv, jclass clazz, jint type)
{
struct v4l2_format *v4l2_format;
v4l2_format = malloc(sizeof(struct v4l2_format));
if (v4l2_format)
{
v4l2_format->type = type;
if (V4L2_BUF_TYPE_VIDEO_CAPTURE == type)
v4l2_format->fmt.pix.priv = 0;
}
return (jlong) v4l2_format;
}
JNIEXPORT jlong JNICALL
Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_v4l2_1format_1getFmtPix
(JNIEnv *jniEnv, jclass clazz, jlong v4l2_format)
{
return (jlong) &(((struct v4l2_format *) v4l2_format)->fmt.pix);
}
JNIEXPORT jint JNICALL
Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_V4L2_1PIX_1FMT_1RGB24
(JNIEnv *jniEnv, jclass clazz)
{
return (jint) V4L2_PIX_FMT_RGB24;
}
JNIEXPORT jint JNICALL
Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_V4L2_1PIX_1FMT_1UYVY
(JNIEnv *jniEnv, jclass clazz)
{
return (jint) V4L2_PIX_FMT_UYVY;
}
JNIEXPORT jint JNICALL
Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_v4l2_1pix_1format_1getHeight
(JNIEnv *jniEnv, jclass clazz, jlong v4l2_pix_format)
{
return ((struct v4l2_pix_format *) v4l2_pix_format)->height;
}
JNIEXPORT jint JNICALL
Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_v4l2_1pix_1format_1getPixelformat
(JNIEnv *jniEnv, jclass clazz, jlong v4l2_pix_format)
{
return ((struct v4l2_pix_format *) v4l2_pix_format)->pixelformat;
}
JNIEXPORT jint JNICALL
Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_v4l2_1pix_1format_1getWidth
(JNIEnv *jniEnv, jclass clazz, jlong v4l2_pix_format)
{
return ((struct v4l2_pix_format *) v4l2_pix_format)->width;
}
JNIEXPORT void JNICALL
Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_v4l2_1pix_1format_1setBytesperline
(JNIEnv *jniEnv, jclass clazz, jlong v4l2_pix_format, jint bytesperline)
{
((struct v4l2_pix_format *) v4l2_pix_format)->bytesperline = bytesperline;
}
JNIEXPORT void JNICALL
Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_v4l2_1pix_1format_1setField
(JNIEnv *jniEnv, jclass clazz, jlong v4l2_pix_format, jint field)
{
((struct v4l2_pix_format *) v4l2_pix_format)->field = field;
}
JNIEXPORT void JNICALL
Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_v4l2_1pix_1format_1setPixelformat
(JNIEnv *jniEnv, jclass clazz, jlong v4l2_pix_format, jint pixelformat)
{
((struct v4l2_pix_format *) v4l2_pix_format)->pixelformat = pixelformat;
}
JNIEXPORT void JNICALL
Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_v4l2_1pix_1format_1setWidthAndHeight
(JNIEnv *jniEnv, jclass clazz,
jlong v4l2_pix_format,
jint width, jint height)
{
struct v4l2_pix_format *ptr;
ptr = (struct v4l2_pix_format *) v4l2_pix_format;
ptr->width = width;
ptr->height = height;
}
JNIEXPORT jlong JNICALL
Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_v4l2_1requestbuffers_1alloc
(JNIEnv *jniEnv, jclass clazz, jint type)
{
struct v4l2_requestbuffers *v4l2_requestbuffers;
v4l2_requestbuffers = malloc(sizeof(struct v4l2_requestbuffers));
if (v4l2_requestbuffers)
v4l2_requestbuffers->type = type;
return (jlong) v4l2_requestbuffers;
}
JNIEXPORT jint JNICALL
Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_v4l2_1requestbuffers_1getCount
(JNIEnv *jniEnv, jclass clazz, jlong v4l2_requestbuffers)
{
return ((struct v4l2_requestbuffers *) v4l2_requestbuffers)->count;
}
JNIEXPORT void JNICALL
Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_v4l2_1requestbuffers_1setCount
(JNIEnv *jniEnv, jclass clazz, jlong v4l2_requestbuffers, jint count)
{
((struct v4l2_requestbuffers *) v4l2_requestbuffers)->count = count;
}
JNIEXPORT void JNICALL
Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_v4l2_1requestbuffers_1setMemory
(JNIEnv *jniEnv, jclass clazz, jlong v4l2_requestbuffers, jint memory)
{
((struct v4l2_requestbuffers *) v4l2_requestbuffers)->memory = memory;
}
JNIEXPORT jint JNICALL
Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_VIDIOC_1DQBUF
(JNIEnv *jniEnv, jclass clazz)
{
return VIDIOC_DQBUF;
}
JNIEXPORT jint JNICALL
Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_VIDIOC_1G_1FMT
(JNIEnv *jniEnv, jclass clazz)
{
return VIDIOC_G_FMT;
}
JNIEXPORT jint JNICALL
Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_VIDIOC_1QBUF
(JNIEnv *jniEnv, jclass clazz)
{
return VIDIOC_QBUF;
}
JNIEXPORT jint JNICALL
Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_VIDIOC_1QUERYBUF
(JNIEnv *jniEnv, jclass clazz)
{
return VIDIOC_QUERYBUF;
}
JNIEXPORT jint JNICALL
Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_VIDIOC_1QUERYCAP
(JNIEnv *jniEnv, jclass clazz)
{
return VIDIOC_QUERYCAP;
}
JNIEXPORT jint JNICALL
Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_VIDIOC_1REQBUFS
(JNIEnv *jniEnv, jclass clazz)
{
return VIDIOC_REQBUFS;
}
JNIEXPORT jint JNICALL
Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_VIDIOC_1S_1FMT
(JNIEnv *jniEnv, jclass clazz)
{
return VIDIOC_S_FMT;
}
JNIEXPORT jint JNICALL
Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_VIDIOC_1STREAMOFF
(JNIEnv *jniEnv, jclass clazz)
{
return VIDIOC_STREAMOFF;
}
JNIEXPORT jint JNICALL
Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_VIDIOC_1STREAMON
(JNIEnv *jniEnv, jclass clazz)
{
return VIDIOC_STREAMON;
}

@ -0,0 +1,371 @@
/* DO NOT EDIT THIS FILE - it is machine generated */
#include <jni.h>
/* Header for class net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2 */
#ifndef _Included_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2
#define _Included_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2
#ifdef __cplusplus
extern "C" {
#endif
#undef net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_MAP_SHARED
#define net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_MAP_SHARED 1L
#undef net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_O_NONBLOCK
#define net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_O_NONBLOCK 2048L
#undef net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_O_RDWR
#define net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_O_RDWR 2L
#undef net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_PROT_READ
#define net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_PROT_READ 1L
#undef net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_PROT_WRITE
#define net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_PROT_WRITE 2L
#undef net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_V4L2_BUF_TYPE_VIDEO_CAPTURE
#define net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_V4L2_BUF_TYPE_VIDEO_CAPTURE 1L
#undef net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_V4L2_CAP_STREAMING
#define net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_V4L2_CAP_STREAMING 67108864L
#undef net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_V4L2_CAP_VIDEO_CAPTURE
#define net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_V4L2_CAP_VIDEO_CAPTURE 1L
#undef net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_V4L2_FIELD_NONE
#define net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_V4L2_FIELD_NONE 1L
#undef net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_V4L2_MEMORY_MMAP
#define net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_V4L2_MEMORY_MMAP 1L
#undef net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_V4L2_MEMORY_USERPTR
#define net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_V4L2_MEMORY_USERPTR 2L
/*
* Class: net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2
* Method: close
* Signature: (I)I
*/
JNIEXPORT jint JNICALL Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_close
(JNIEnv *, jclass, jint);
/*
* Class: net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2
* Method: free
* Signature: (J)V
*/
JNIEXPORT void JNICALL Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_free
(JNIEnv *, jclass, jlong);
/*
* Class: net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2
* Method: ioctl
* Signature: (IIJ)I
*/
JNIEXPORT jint JNICALL Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_ioctl
(JNIEnv *, jclass, jint, jint, jlong);
/*
* Class: net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2
* Method: memcpy
* Signature: (JJI)J
*/
JNIEXPORT jlong JNICALL Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_memcpy
(JNIEnv *, jclass, jlong, jlong, jint);
/*
* Class: net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2
* Method: mmap
* Signature: (JIIIIJ)J
*/
JNIEXPORT jlong JNICALL Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_mmap
(JNIEnv *, jclass, jlong, jint, jint, jint, jint, jlong);
/*
* Class: net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2
* Method: munmap
* Signature: (JI)I
*/
JNIEXPORT jint JNICALL Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_munmap
(JNIEnv *, jclass, jlong, jint);
/*
* Class: net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2
* Method: open
* Signature: (Ljava/lang/String;I)I
*/
JNIEXPORT jint JNICALL Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_open
(JNIEnv *, jclass, jstring, jint);
/*
* Class: net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2
* Method: v4l2_buffer_alloc
* Signature: (I)J
*/
JNIEXPORT jlong JNICALL Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_v4l2_1buffer_1alloc
(JNIEnv *, jclass, jint);
/*
* Class: net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2
* Method: v4l2_buffer_getBytesused
* Signature: (J)I
*/
JNIEXPORT jint JNICALL Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_v4l2_1buffer_1getBytesused
(JNIEnv *, jclass, jlong);
/*
* Class: net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2
* Method: v4l2_buffer_getIndex
* Signature: (J)I
*/
JNIEXPORT jint JNICALL Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_v4l2_1buffer_1getIndex
(JNIEnv *, jclass, jlong);
/*
* Class: net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2
* Method: v4l2_buffer_getLength
* Signature: (J)I
*/
JNIEXPORT jint JNICALL Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_v4l2_1buffer_1getLength
(JNIEnv *, jclass, jlong);
/*
* Class: net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2
* Method: v4l2_buffer_getMOffset
* Signature: (J)J
*/
JNIEXPORT jlong JNICALL Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_v4l2_1buffer_1getMOffset
(JNIEnv *, jclass, jlong);
/*
* Class: net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2
* Method: v4l2_buffer_setIndex
* Signature: (JI)V
*/
JNIEXPORT void JNICALL Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_v4l2_1buffer_1setIndex
(JNIEnv *, jclass, jlong, jint);
/*
* Class: net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2
* Method: v4l2_buffer_setMemory
* Signature: (JI)V
*/
JNIEXPORT void JNICALL Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_v4l2_1buffer_1setMemory
(JNIEnv *, jclass, jlong, jint);
/*
* Class: net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2
* Method: v4l2_buf_type_alloc
* Signature: (I)J
*/
JNIEXPORT jlong JNICALL Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_v4l2_1buf_1type_1alloc
(JNIEnv *, jclass, jint);
/*
* Class: net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2
* Method: v4l2_capability_alloc
* Signature: ()J
*/
JNIEXPORT jlong JNICALL Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_v4l2_1capability_1alloc
(JNIEnv *, jclass);
/*
* Class: net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2
* Method: v4l2_capability_getCapabilities
* Signature: (J)I
*/
JNIEXPORT jint JNICALL Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_v4l2_1capability_1getCapabilities
(JNIEnv *, jclass, jlong);
/*
* Class: net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2
* Method: v4l2_capability_getCard
* Signature: (J)Ljava/lang/String;
*/
JNIEXPORT jstring JNICALL Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_v4l2_1capability_1getCard
(JNIEnv *, jclass, jlong);
/*
* Class: net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2
* Method: v4l2_format_alloc
* Signature: (I)J
*/
JNIEXPORT jlong JNICALL Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_v4l2_1format_1alloc
(JNIEnv *, jclass, jint);
/*
* Class: net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2
* Method: v4l2_format_getFmtPix
* Signature: (J)J
*/
JNIEXPORT jlong JNICALL Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_v4l2_1format_1getFmtPix
(JNIEnv *, jclass, jlong);
/*
* Class: net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2
* Method: V4L2_PIX_FMT_RGB24
* Signature: ()I
*/
JNIEXPORT jint JNICALL Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_V4L2_1PIX_1FMT_1RGB24
(JNIEnv *, jclass);
/*
* Class: net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2
* Method: V4L2_PIX_FMT_UYVY
* Signature: ()I
*/
JNIEXPORT jint JNICALL Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_V4L2_1PIX_1FMT_1UYVY
(JNIEnv *, jclass);
/*
* Class: net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2
* Method: v4l2_pix_format_getHeight
* Signature: (J)I
*/
JNIEXPORT jint JNICALL Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_v4l2_1pix_1format_1getHeight
(JNIEnv *, jclass, jlong);
/*
* Class: net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2
* Method: v4l2_pix_format_getPixelformat
* Signature: (J)I
*/
JNIEXPORT jint JNICALL Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_v4l2_1pix_1format_1getPixelformat
(JNIEnv *, jclass, jlong);
/*
* Class: net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2
* Method: v4l2_pix_format_getWidth
* Signature: (J)I
*/
JNIEXPORT jint JNICALL Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_v4l2_1pix_1format_1getWidth
(JNIEnv *, jclass, jlong);
/*
* Class: net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2
* Method: v4l2_pix_format_setBytesperline
* Signature: (JI)V
*/
JNIEXPORT void JNICALL Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_v4l2_1pix_1format_1setBytesperline
(JNIEnv *, jclass, jlong, jint);
/*
* Class: net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2
* Method: v4l2_pix_format_setField
* Signature: (JI)V
*/
JNIEXPORT void JNICALL Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_v4l2_1pix_1format_1setField
(JNIEnv *, jclass, jlong, jint);
/*
* Class: net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2
* Method: v4l2_pix_format_setPixelformat
* Signature: (JI)V
*/
JNIEXPORT void JNICALL Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_v4l2_1pix_1format_1setPixelformat
(JNIEnv *, jclass, jlong, jint);
/*
* Class: net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2
* Method: v4l2_pix_format_setWidthAndHeight
* Signature: (JII)V
*/
JNIEXPORT void JNICALL Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_v4l2_1pix_1format_1setWidthAndHeight
(JNIEnv *, jclass, jlong, jint, jint);
/*
* Class: net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2
* Method: v4l2_requestbuffers_alloc
* Signature: (I)J
*/
JNIEXPORT jlong JNICALL Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_v4l2_1requestbuffers_1alloc
(JNIEnv *, jclass, jint);
/*
* Class: net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2
* Method: v4l2_requestbuffers_getCount
* Signature: (J)I
*/
JNIEXPORT jint JNICALL Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_v4l2_1requestbuffers_1getCount
(JNIEnv *, jclass, jlong);
/*
* Class: net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2
* Method: v4l2_requestbuffers_setCount
* Signature: (JI)V
*/
JNIEXPORT void JNICALL Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_v4l2_1requestbuffers_1setCount
(JNIEnv *, jclass, jlong, jint);
/*
* Class: net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2
* Method: v4l2_requestbuffers_setMemory
* Signature: (JI)V
*/
JNIEXPORT void JNICALL Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_v4l2_1requestbuffers_1setMemory
(JNIEnv *, jclass, jlong, jint);
/*
* Class: net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2
* Method: VIDIOC_DQBUF
* Signature: ()I
*/
JNIEXPORT jint JNICALL Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_VIDIOC_1DQBUF
(JNIEnv *, jclass);
/*
* Class: net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2
* Method: VIDIOC_G_FMT
* Signature: ()I
*/
JNIEXPORT jint JNICALL Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_VIDIOC_1G_1FMT
(JNIEnv *, jclass);
/*
* Class: net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2
* Method: VIDIOC_QBUF
* Signature: ()I
*/
JNIEXPORT jint JNICALL Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_VIDIOC_1QBUF
(JNIEnv *, jclass);
/*
* Class: net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2
* Method: VIDIOC_QUERYBUF
* Signature: ()I
*/
JNIEXPORT jint JNICALL Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_VIDIOC_1QUERYBUF
(JNIEnv *, jclass);
/*
* Class: net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2
* Method: VIDIOC_QUERYCAP
* Signature: ()I
*/
JNIEXPORT jint JNICALL Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_VIDIOC_1QUERYCAP
(JNIEnv *, jclass);
/*
* Class: net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2
* Method: VIDIOC_REQBUFS
* Signature: ()I
*/
JNIEXPORT jint JNICALL Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_VIDIOC_1REQBUFS
(JNIEnv *, jclass);
/*
* Class: net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2
* Method: VIDIOC_S_FMT
* Signature: ()I
*/
JNIEXPORT jint JNICALL Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_VIDIOC_1S_1FMT
(JNIEnv *, jclass);
/*
* Class: net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2
* Method: VIDIOC_STREAMOFF
* Signature: ()I
*/
JNIEXPORT jint JNICALL Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_VIDIOC_1STREAMOFF
(JNIEnv *, jclass);
/*
* Class: net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2
* Method: VIDIOC_STREAMON
* Signature: ()I
*/
JNIEXPORT jint JNICALL Java_net_java_sip_communicator_impl_neomedia_jmfext_media_protocol_video4linux2_Video4Linux2_VIDIOC_1STREAMON
(JNIEnv *, jclass);
#ifdef __cplusplus
}
#endif
#endif
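The Java-side Video4Linux2 class that declares these natives is not part of this diff, so the exact shape of its API is inferred from the generated header above and from its use in Video4Linux2Auto further below. With that caveat, here is a minimal, hedged sketch of the classic V4L2 memory-mapped capture sequence as it would be driven through these wrappers (a single buffer, no error handling, and an illustrative device path):

int fd = Video4Linux2.open("/dev/video0", Video4Linux2.O_RDWR);

if (-1 != fd)
{
    // VIDIOC_QUERYCAP/VIDIOC_S_FMT negotiation omitted; Video4Linux2Auto
    // below shows how this commit performs it.

    // Ask the driver for a single memory-mapped buffer.
    long reqbufs
        = Video4Linux2.v4l2_requestbuffers_alloc(
                Video4Linux2.V4L2_BUF_TYPE_VIDEO_CAPTURE);

    Video4Linux2.v4l2_requestbuffers_setMemory(
            reqbufs,
            Video4Linux2.V4L2_MEMORY_MMAP);
    Video4Linux2.v4l2_requestbuffers_setCount(reqbufs, 1);
    Video4Linux2.ioctl(fd, Video4Linux2.VIDIOC_REQBUFS, reqbufs);
    Video4Linux2.free(reqbufs);

    // Query the buffer and map its device memory into the process.
    long buf
        = Video4Linux2.v4l2_buffer_alloc(
                Video4Linux2.V4L2_BUF_TYPE_VIDEO_CAPTURE);

    Video4Linux2.v4l2_buffer_setMemory(buf, Video4Linux2.V4L2_MEMORY_MMAP);
    Video4Linux2.v4l2_buffer_setIndex(buf, 0);
    Video4Linux2.ioctl(fd, Video4Linux2.VIDIOC_QUERYBUF, buf);

    int length = Video4Linux2.v4l2_buffer_getLength(buf);
    long map
        = Video4Linux2.mmap(
                0,
                length,
                Video4Linux2.PROT_READ | Video4Linux2.PROT_WRITE,
                Video4Linux2.MAP_SHARED,
                fd,
                Video4Linux2.v4l2_buffer_getMOffset(buf));

    // Start streaming, capture one frame, stop streaming.
    long bufType
        = Video4Linux2.v4l2_buf_type_alloc(
                Video4Linux2.V4L2_BUF_TYPE_VIDEO_CAPTURE);

    Video4Linux2.ioctl(fd, Video4Linux2.VIDIOC_STREAMON, bufType);
    Video4Linux2.ioctl(fd, Video4Linux2.VIDIOC_QBUF, buf);
    Video4Linux2.ioctl(fd, Video4Linux2.VIDIOC_DQBUF, buf);

    int bytesused = Video4Linux2.v4l2_buffer_getBytesused(buf);
    // ... the frame occupies bytesused bytes starting at map, e.g. to be
    // copied out with Video4Linux2.memcpy(dest, map, bytesused) ...

    Video4Linux2.ioctl(fd, Video4Linux2.VIDIOC_STREAMOFF, bufType);
    Video4Linux2.free(bufType);
    Video4Linux2.munmap(map, length);
    Video4Linux2.free(buf);
    Video4Linux2.close(fd);
}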

@ -484,18 +484,18 @@ public ZrtpControl createZrtpControl()
*/
public List<ScreenDevice> getAvailableScreenDevices()
{
List<ScreenDevice> ret = new ArrayList<ScreenDevice>();
ScreenDevice screens[] = ScreenDeviceImpl.getAvailableScreenDevice();
ScreenDevice screens[] = ScreenDeviceImpl.getAvailableScreenDevice();
List<ScreenDevice> screenList;
if(screens != null)
if (screens != null)
{
/* populates screen list */
for(ScreenDevice sc : screens)
{
ret.add(sc);
}
screenList = new ArrayList<ScreenDevice>(screens.length);
for (ScreenDevice screen : screens)
screenList.add(screen);
}
return ret;
else
screenList = new ArrayList<ScreenDevice>();
return screenList;
}
/**
@ -505,24 +505,23 @@ public List<ScreenDevice> getAvailableScreenDevices()
*/
public ScreenDevice getDefaultScreenDevice()
{
List<ScreenDevice> screens = getAvailableScreenDevices();
ScreenDevice best = null;
int width = 0;
int height = 0;
List<ScreenDevice> screens = getAvailableScreenDevices();
int width = 0;
int height = 0;
ScreenDevice best = null;
for(ScreenDevice sc : screens)
{
java.awt.Dimension res = sc.getSize();
if(res != null && (width < res.getSize().width ||
height < res.getSize().height))
for (ScreenDevice sc : screens)
{
width = res.width;
height = res.height;
best = sc;
}
}
java.awt.Dimension res = sc.getSize();
return best;
if ((res != null)
&& ((width < res.width) || (height < res.height)))
{
width = res.width;
height = res.height;
best = sc;
}
}
return best;
}
}

@ -50,7 +50,7 @@ public class Constants
* On Mac OS X, the Apple iSight camera reports two sizes, 640x480 and
* 320x240. If we use the default size 352x288, we must use the source
* format 640x480. In this situation we suffer from high CPU usage as every
* frame is scaled, so we use the non standard format 320x240.
* frame is scaled, so we use the non-standard format 320x240.
*/
if (OSUtils.IS_MAC)
{

@ -146,7 +146,7 @@ public int getPixFmt()
*
* @param format the matching <tt>Format</tt> to intersect with this one
* @return a <tt>Format</tt> with its attributes set to the attributes
* common to this instane and the specified <tt>format</tt>
* common to this instance and the specified <tt>format</tt>
*/
@Override
public Format intersects(Format format)
@ -186,7 +186,7 @@ public boolean matches(Format format)
return
(pixFmt == NOT_SPECIFIED
|| avFrameFormat.pixFmt == NOT_SPECIFIED
|| (avFrameFormat.pixFmt == NOT_SPECIFIED)
|| (pixFmt == avFrameFormat.pixFmt));
}
}

@ -17,8 +17,8 @@ public class ByteBuffer
{
/**
* The maximum number of bytes which can be written into the native
* memory represented by this instance.
* The maximum number of bytes which can be written into the native memory
* represented by this instance.
*/
public final int capacity;
@ -29,8 +29,8 @@ public class ByteBuffer
private boolean free;
/**
* The number of bytes of valid data that the native memory represented
* by this instance contains.
* The number of bytes of valid data that the native memory represented by
* this instance contains.
*/
private int length;
@ -43,8 +43,8 @@ public class ByteBuffer
* Initializes a new <tt>ByteBuffer</tt> instance with a specific
* <tt>capacity</tt>.
*
* @param capacity the maximum number of bytes which can be written into
* the native memory represented by the new instance
* @param capacity the maximum number of bytes which can be written into the
* native memory represented by the new instance
*/
public ByteBuffer(int capacity)
{
@ -65,8 +65,8 @@ public ByteBuffer(int capacity)
}
/**
* Gets the number of bytes of valid data that the native memory
* represented by this instance contains.
* Gets the number of bytes of valid data that the native memory represented
* by this instance contains.
*
* @return the number of bytes of valid data that the native memory
* represented by this instance contains
@ -79,9 +79,9 @@ public int getLength()
/**
* Determines whether this instance is free to be written bytes into.
*
* @return <tt>true</tt> if this instance is free to be written bytes
* into or <tt>false</tt> is the native memory represented by this
* instance is already is use
* @return <tt>true</tt> if this instance is free to be written bytes into
* or <tt>false</tt> if the native memory represented by this instance is
* already in use
*/
public boolean isFree()
{
@ -89,11 +89,11 @@ public boolean isFree()
}
/**
* Sets the indicator which determines whether this instance is free to
* be written bytes into.
* Sets the indicator which determines whether this instance is free to be
* written bytes into.
*
* @param free <tt>true</tt> if this instance is to be made available
* for writing bytes into; otherwise, <tt>false</tt>
* @param free <tt>true</tt> if this instance is to be made available for
* writing bytes into; otherwise, <tt>false</tt>
*/
public void setFree(boolean free)
{
@ -103,14 +103,14 @@ public void setFree(boolean free)
}
/**
* Sets the number of bytes of valid data that the native memory
* represented by this instance contains.
* Sets the number of bytes of valid data that the native memory represented
* by this instance contains.
*
* @param length the number of bytes of valid data that the native
* memory represented by this instance contains
* @param length the number of bytes of valid data that the native memory
* represented by this instance contains
*/
public void setLength(int length)
{
this.length = length;
}
}
}

@ -58,6 +58,8 @@ public class FFmpeg
*/
public static final int PIX_FMT_RGB32_1;
public static final int PIX_FMT_UYVY422 = 17;
public static final int PIX_FMT_YUV420P;
public static final int SWS_BICUBIC = 4;

@ -0,0 +1,125 @@
/*
* SIP Communicator, the OpenSource Java VoIP and Instant Messaging client.
*
* Distributable under LGPL license.
* See terms of license at gnu.org.
*/
package net.java.sip.communicator.impl.neomedia.codec.video;
import java.awt.*;
import javax.media.*;
import net.java.sip.communicator.impl.neomedia.jmfext.media.protocol.*;
/**
* Represents an <tt>AVFrame</tt> used to provide captured media data in native
* format without representing the very frame data in the Java heap. Since the
* user may not know when the <tt>AVFrame</tt> instances are really safe for
* deallocation, <tt>FinalizableAVFrame</tt> relies on the Java finalization
* mechanism to reclaim the represented native memory.
*
* @author Lubomir Marinov
*/
public class FinalizableAVFrame
extends AVFrame
{
/**
* The indicator which determines whether the native memory represented by
* this instance has already been freed/deallocated.
*/
private boolean freed = false;
/**
* Initializes a new <tt>FinalizableAVFrame</tt> instance which is to
* allocate a new native FFmpeg <tt>AVFrame</tt> and represent it.
*/
public FinalizableAVFrame()
{
super(FFmpeg.avcodec_alloc_frame());
}
/**
* Deallocates the native memory represented by this instance.
*
* @see Object#finalize()
*/
@Override
protected void finalize()
throws Throwable
{
try
{
if (!freed)
{
long ptr = getPtr();
long bufferPtr = FFmpeg.avpicture_get_data0(ptr);
if (bufferPtr != 0)
freeData0(bufferPtr);
FFmpeg.av_free(ptr);
freed = true;
}
}
finally
{
super.finalize();
}
}
/**
* Frees the memory pointed to by the <tt>data0</tt> member of the native
* <tt>AVFrame</tt>.
*/
protected void freeData0(long data0)
{
FFmpeg.av_free(data0);
}
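/**
 * Reads captured media data out of a specific native <tt>ByteBuffer</tt>
 * into a specific JMF <tt>Buffer</tt> by making the <tt>Buffer</tt> data an
 * <tt>AVFrame</tt> which points into the native memory of <tt>data</tt>. If
 * the <tt>Buffer</tt> already carries an <tt>AVFrame</tt>, that frame is
 * reused and the native memory it previously pointed into is returned to
 * <tt>byteBufferPool</tt>; otherwise, a new <tt>FinalizableAVFrame</tt> is
 * set on the <tt>Buffer</tt> which returns its native memory to
 * <tt>byteBufferPool</tt> upon finalization.
 *
 * @param buffer the JMF <tt>Buffer</tt> to read the captured media data into
 * @param format the <tt>AVFrameFormat</tt> of the captured media data
 * @param data the native memory which contains the captured media data
 * @param byteBufferPool the pool to which native memory no longer in use is
 * to be returned
 */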
public static void read(
Buffer buffer,
Format format,
ByteBuffer data,
final ByteBufferPool byteBufferPool)
{
Object bufferData = buffer.getData();
AVFrame frame;
long framePtr;
long bufferPtrToReturnFree;
if (bufferData instanceof AVFrame)
{
frame = (AVFrame) bufferData;
framePtr = frame.getPtr();
bufferPtrToReturnFree = FFmpeg.avpicture_get_data0(framePtr);
}
else
{
frame
= new FinalizableAVFrame()
{
@Override
protected void freeData0(long data0)
{
byteBufferPool.returnFreeBuffer(data0);
}
};
buffer.setData(frame);
framePtr = frame.getPtr();
bufferPtrToReturnFree = 0;
}
AVFrameFormat frameFormat = (AVFrameFormat) format;
Dimension frameSize = frameFormat.getSize();
FFmpeg.avpicture_fill(
framePtr,
data.ptr,
frameFormat.getPixFmt(),
frameSize.width, frameSize.height);
if (bufferPtrToReturnFree != 0)
byteBufferPool.returnFreeBuffer(bufferPtrToReturnFree);
}
}
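A hedged sketch of how a capture stream could use the read helper above from its own read(Buffer) implementation; the field names byteBufferPool, captureFormat (an AVFrameFormat) and frameLength are illustrative assumptions, not code from this commit:

public void read(Buffer buffer)
    throws IOException
{
    ByteBuffer data = byteBufferPool.getFreeBuffer(frameLength);

    if (data != null)
    {
        // ... copy the captured frame into the native memory at data.ptr ...
        data.setLength(frameLength);

        /*
         * Wrap the native memory in an AVFrame carried by the JMF Buffer;
         * any native memory the Buffer previously referenced is returned
         * to byteBufferPool by FinalizableAVFrame.read().
         */
        FinalizableAVFrame.read(buffer, captureFormat, data, byteBufferPool);
        buffer.setFormat(captureFormat);
        buffer.setTimeStamp(System.nanoTime());
    }
}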

@ -12,8 +12,9 @@
import javax.media.*;
import javax.media.format.*;
import net.java.sip.communicator.impl.neomedia.portaudio.*;
import net.java.sip.communicator.impl.neomedia.*;
import net.java.sip.communicator.impl.neomedia.codec.video.*;
import net.java.sip.communicator.impl.neomedia.portaudio.*;
import net.java.sip.communicator.service.configuration.*;
import net.java.sip.communicator.util.*;
@ -285,16 +286,24 @@ else if (audioCaptureDevices.length < 1)
else
{
logger.info("Scanning for configured Video Devices.");
videoCaptureDevice =
extractConfiguredVideoCaptureDevice(VideoFormat.RGB);
// no RGB camera found. And what about YUV ?
if (videoCaptureDevice == null)
Format[] formats
= new Format[]
{
new AVFrameFormat(),
new VideoFormat(VideoFormat.RGB),
new VideoFormat(VideoFormat.YUV)
};
for (Format format : formats)
{
videoCaptureDevice =
extractConfiguredVideoCaptureDevice(VideoFormat.YUV);
if (videoCaptureDevice == null)
logger.info("No Video Device was found.");
videoCaptureDevice
= extractConfiguredVideoCaptureDevice(format);
if (videoCaptureDevice != null)
break;
}
if (videoCaptureDevice == null)
logger.info("No Video Device was found.");
}
}
@ -304,17 +313,17 @@ else if (audioCaptureDevices.length < 1)
* @param format the output format of the video device.
* @return CaptureDeviceInfo for the video device.
*/
private CaptureDeviceInfo extractConfiguredVideoCaptureDevice(String format)
private CaptureDeviceInfo extractConfiguredVideoCaptureDevice(Format format)
{
List<CaptureDeviceInfo> videoCaptureDevices =
CaptureDeviceManager.getDeviceList(new VideoFormat(format));
List<CaptureDeviceInfo> videoCaptureDevices
= CaptureDeviceManager.getDeviceList(format);
CaptureDeviceInfo videoCaptureDevice = null;
if (videoCaptureDevices.size() > 0)
{
String videoDevName
= NeomediaActivator
.getConfigurationService().getString(PROP_VIDEO_DEVICE);
= NeomediaActivator.getConfigurationService()
.getString(PROP_VIDEO_DEVICE);
if (videoDevName == null)
videoCaptureDevice = videoCaptureDevices.get(0);
@ -331,8 +340,14 @@ private CaptureDeviceInfo extractConfiguredVideoCaptureDevice(String format)
}
if (videoCaptureDevice != null)
logger.info("Found " + videoCaptureDevice.getName()
+ " as an " + format + " Video Device.");
{
logger.info(
"Found "
+ videoCaptureDevice.getName()
+ " as a "
+ format
+ " Video Device.");
}
}
return videoCaptureDevice;
}
@ -375,7 +390,6 @@ public CaptureDeviceInfo[] getAvailableAudioCaptureDevices()
*
* @param soundSystem
* filter capture devices only from the supplied audio system.
*
* @return an array of <code>CaptureDeviceInfo</code> describing the audio
* capture devices available through this
* <code>DeviceConfiguration</code>
@ -383,6 +397,7 @@ public CaptureDeviceInfo[] getAvailableAudioCaptureDevices()
public CaptureDeviceInfo[] getAvailableAudioCaptureDevices(String soundSystem)
{
String protocol = null;
if(soundSystem.equals(AUDIO_SYSTEM_JAVASOUND))
protocol = "javasound";
else if(soundSystem.equals(AUDIO_SYSTEM_PORTAUDIO))
@ -392,17 +407,13 @@ else if(soundSystem.equals(AUDIO_SYSTEM_PORTAUDIO))
if(protocol != null)
{
CaptureDeviceInfo[] all = getAvailableAudioCaptureDevices();
for(int i = 0; i < all.length; i++)
for(CaptureDeviceInfo cDeviceInfo
: getAvailableAudioCaptureDevices())
{
CaptureDeviceInfo cDeviceInfo = all[i];
if(cDeviceInfo.getLocator().getProtocol().equals(protocol))
{
res.add(cDeviceInfo);
}
}
}
return res.toArray(NO_CAPTURE_DEVICES);
}
@ -429,13 +440,21 @@ public CaptureDeviceInfo[] getAvailableAudioPlaybackDevices()
*/
public CaptureDeviceInfo[] getAvailableVideoCaptureDevices()
{
Format[] formats
= new Format[]
{
new AVFrameFormat(),
new VideoFormat(VideoFormat.RGB),
new VideoFormat(VideoFormat.YUV)
};
Set<CaptureDeviceInfo> videoCaptureDevices =
new HashSet<CaptureDeviceInfo>();
videoCaptureDevices.addAll(CaptureDeviceManager
.getDeviceList(new VideoFormat(VideoFormat.RGB)));
videoCaptureDevices.addAll(CaptureDeviceManager
.getDeviceList(new VideoFormat(VideoFormat.YUV)));
for (Format format : formats)
{
videoCaptureDevices.addAll(
CaptureDeviceManager.getDeviceList(format));
}
return videoCaptureDevices.toArray(NO_CAPTURE_DEVICES);
}
@ -501,16 +520,13 @@ public void setAudioCaptureDevice(CaptureDeviceInfo device, boolean save)
if(save)
{
ConfigurationService config
= NeomediaActivator.getConfigurationService();
if (audioCaptureDevice != null)
{
config.setProperty(PROP_AUDIO_DEVICE, audioCaptureDevice
.getName());
}
else
config.setProperty(PROP_AUDIO_DEVICE, null);
NeomediaActivator
.getConfigurationService()
.setProperty(
PROP_AUDIO_DEVICE,
(audioCaptureDevice == null)
? null
: audioCaptureDevice.getName());
}
firePropertyChange(AUDIO_CAPTURE_DEVICE, oldDevice, device);
@ -595,11 +611,7 @@ else if(cdi.getLocator().getProtocol().equals("portaudio"))
res = asName;
}
}
if(res == null)
res = AUDIO_SYSTEM_NONE;
return res;
return (res == null) ? AUDIO_SYSTEM_NONE : res;
}
/**

@ -186,8 +186,8 @@ private void detectCaptureDevices()
fmjVideoAvailable = false;
}
// QuickTime
if (OSUtils.IS_MAC)
if (OSUtils.IS_MAC) // QuickTime
{
try
{
new QuickTimeAuto();
@ -196,6 +196,18 @@ private void detectCaptureDevices()
{
logger.debug("No QuickTime detected: " + t.getMessage(), t);
}
}
else if (OSUtils.IS_LINUX) // Video4Linux2
{
try
{
new Video4Linux2Auto();
}
catch (Throwable t)
{
logger.debug("No Video4Linux2 detected: " + t.getMessage(), t);
}
}
/* Desktop capture */
try

@ -58,10 +58,7 @@ public QuickTimeAuto()
+ inputDevice.uniqueID()),
new Format[]
{
new AVFrameFormat(
null,
Format.NOT_SPECIFIED,
FFmpeg.PIX_FMT_ARGB),
new AVFrameFormat(FFmpeg.PIX_FMT_ARGB),
new RGBFormat()
});

@ -0,0 +1,200 @@
/*
* SIP Communicator, the OpenSource Java VoIP and Instant Messaging client.
*
* Distributable under LGPL license.
* See terms of license at gnu.org.
*/
package net.java.sip.communicator.impl.neomedia.device;
import javax.media.*;
import javax.media.format.*;
import net.java.sip.communicator.impl.neomedia.codec.video.*;
import net.java.sip.communicator.impl.neomedia.jmfext.media.protocol.video4linux2.*;
/**
* Discovers and registers <tt>CaptureDevice</tt>s which implement the Video for
* Linux Two API Specification with JMF.
*
* @author Lubomir Marinov
*/
public class Video4Linux2Auto
{
/**
* The protocol of the <tt>MediaLocator</tt>s identifying
* <tt>CaptureDevice</tt>s which implement the Video for Linux Two API
* Specification.
*/
public static final String LOCATOR_PROTOCOL = "video4linux2";
/**
* Initializes a new <tt>Video4Linux2Auto</tt> instance which discovers and
* registers <tt>CaptureDevice</tt>s which implement the Video for Linux Two
* API Specification with JMF.
*
* @throws Exception if anything goes wrong while discovering and
* registering <tt>CaptureDevice</tt>s which implement the Video for Linux
* Two API Specification with JMF
*/
public Video4Linux2Auto()
throws Exception
{
String baseDeviceName = "/dev/video";
boolean captureDeviceInfoIsAdded = discoverAndRegister(baseDeviceName);
for (int deviceMinorNumber = 0;
deviceMinorNumber <= 63;
deviceMinorNumber++)
{
captureDeviceInfoIsAdded
= discoverAndRegister(baseDeviceName + deviceMinorNumber)
|| captureDeviceInfoIsAdded;
}
if (captureDeviceInfoIsAdded)
CaptureDeviceManager.commit();
}
/**
* Discovers and registers a <tt>CaptureDevice</tt> implementing the Video
* for Linux Two API Specification with a specific device name with JMF.
*
* @param deviceName the device name of a candidate for a
* <tt>CaptureDevice</tt> implementing the Video for Linux Two API
* Specification to be discovered and registered with JMF
* @return <tt>true</tt> if a <tt>CaptureDeviceInfo</tt> for the specified
* <tt>CaptureDevice</tt> has been added to <tt>CaptureDeviceManager</tt>;
* otherwise, <tt>false</tt>
* @throws Exception if anything goes wrong while discovering and
* registering the specified <tt>CaptureDevice</tt> with JMF
*/
private boolean discoverAndRegister(String deviceName)
throws Exception
{
int fd = Video4Linux2.open(deviceName, Video4Linux2.O_RDWR);
boolean captureDeviceInfoIsAdded = false;
if (-1 != fd)
{
try
{
long v4l2_capability = Video4Linux2.v4l2_capability_alloc();
if (0 != v4l2_capability)
{
try
{
if ((Video4Linux2.ioctl(
fd,
Video4Linux2.VIDIOC_QUERYCAP,
v4l2_capability)
!= -1)
&& ((Video4Linux2
.v4l2_capability_getCapabilities(
v4l2_capability)
& Video4Linux2.V4L2_CAP_VIDEO_CAPTURE)
== Video4Linux2.V4L2_CAP_VIDEO_CAPTURE))
{
captureDeviceInfoIsAdded
= register(deviceName, fd, v4l2_capability);
}
}
finally
{
Video4Linux2.free(v4l2_capability);
}
}
}
finally
{
Video4Linux2.close(fd);
}
}
return captureDeviceInfoIsAdded;
}
/**
* Registers a <tt>CaptureDevice</tt> implementing the Video for Linux Two
* API Specification with a specific device name, a specific <tt>open()</tt>
* file descriptor and a specific <tt>v4l2_capability</tt> with JMF.
*
* @param deviceName the device name of the <tt>CaptureDevice</tt> to be
* registered with JMF
* @param fd the <tt>open()</tt> file descriptor of the <tt>CaptureDevice</tt>
* to be registered with JMF
* @param v4l2_capability the native <tt>v4l2_capability</tt> of the
* <tt>CaptureDevice</tt> to be registered with JMF
* @return <tt>true</tt> if a <tt>CaptureDeviceInfo</tt> for the specified
* <tt>CaptureDevice</tt> has been added to <tt>CaptureDeviceManager</tt>;
* otherwise, <tt>false</tt>
* @throws Exception if anything goes wrong while registering the specified
* <tt>CaptureDevice</tt> with JMF
*/
private boolean register(String deviceName, int fd, long v4l2_capability)
throws Exception
{
long v4l2_format
= Video4Linux2.v4l2_format_alloc(
Video4Linux2.V4L2_BUF_TYPE_VIDEO_CAPTURE);
int pixelformat = 0;
if (0 != v4l2_format)
{
try
{
if (Video4Linux2.ioctl(
fd,
Video4Linux2.VIDIOC_G_FMT,
v4l2_format)
!= -1)
{
long fmtPix
= Video4Linux2.v4l2_format_getFmtPix(v4l2_format);
pixelformat
= Video4Linux2.v4l2_pix_format_getPixelformat(fmtPix);
if ((Video4Linux2.V4L2_PIX_FMT_RGB24 != pixelformat)
&& (Video4Linux2.V4L2_PIX_FMT_UYVY != pixelformat))
{
Video4Linux2.v4l2_pix_format_setPixelformat(
fmtPix,
Video4Linux2.V4L2_PIX_FMT_RGB24);
if (Video4Linux2.ioctl(
fd,
Video4Linux2.VIDIOC_S_FMT,
v4l2_format)
!= -1)
{
pixelformat
= Video4Linux2.v4l2_pix_format_getPixelformat(
fmtPix);
}
}
}
}
finally
{
Video4Linux2.free(v4l2_format);
}
}
Format format;
if (Video4Linux2.V4L2_PIX_FMT_RGB24 == pixelformat)
format = new RGBFormat();
else if (Video4Linux2.V4L2_PIX_FMT_UYVY == pixelformat)
format = new AVFrameFormat(FFmpeg.PIX_FMT_UYVY422);
else
return false;
String name = Video4Linux2.v4l2_capability_getCard(v4l2_capability);
if ((name == null) || (name.length() == 0))
name = deviceName;
else
name += " (" + deviceName + ")";
CaptureDeviceManager.addDevice(
new CaptureDeviceInfo(
name,
new MediaLocator(LOCATOR_PROTOCOL + ":" + deviceName),
new Format[] { format }));
return true;
}
}
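For orientation, a hedged sketch of how the devices registered above become visible to the rest of this commit; it mirrors the lookup performed by DeviceConfiguration.getAvailableVideoCaptureDevices, and the printing is purely illustrative:

Set<CaptureDeviceInfo> devices = new HashSet<CaptureDeviceInfo>();

// register() above advertises each device with either an RGBFormat or an
// AVFrameFormat, so query CaptureDeviceManager for both.
devices.addAll(CaptureDeviceManager.getDeviceList(new RGBFormat()));
devices.addAll(CaptureDeviceManager.getDeviceList(new AVFrameFormat()));

for (CaptureDeviceInfo device : devices)
{
    // e.g. "UVC Camera (/dev/video0)" with locator "video4linux2:/dev/video0"
    System.out.println(device.getName() + ": " + device.getLocator());
}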

@ -227,7 +227,7 @@ protected FormatControl[] createFormatControls()
}
/**
* Create a new <tt>PullBufferStream</tt> which is to be at a specific
* Creates a new <tt>PullBufferStream</tt> which is to be at a specific
* zero-based index in the list of streams of this
* <tt>PullBufferDataSource</tt>. The <tt>Format</tt>-related information of
* the new instance is to be abstracted by a specific

@ -125,16 +125,6 @@ public boolean endOfStream()
{
return false;
}
/**
* Determines if read will block.
*
* @return <tt>true</tt> if read block, <tt>false</tt> otherwise
*/
public boolean willReadBlock()
{
return true;
}
/**
* Gets a <tt>ContentDescriptor</tt> which describes the type of the content
@ -238,4 +228,14 @@ public void stop()
throws IOException
{
}
/**
* Determines if read will block.
*
* @return <tt>true</tt> if read will block; <tt>false</tt> otherwise
*/
public boolean willReadBlock()
{
return true;
}
}

@ -0,0 +1,166 @@
/*
* SIP Communicator, the OpenSource Java VoIP and Instant Messaging client.
*
* Distributable under LGPL license.
* See terms of license at gnu.org.
*/
package net.java.sip.communicator.impl.neomedia.jmfext.media.protocol;
import java.util.*;
import net.java.sip.communicator.impl.neomedia.codec.video.*;
import net.java.sip.communicator.util.*;
/**
* Represents a pool of <tt>ByteBuffer</tt>s which reduces the allocations and
* deallocations of <tt>ByteBuffer</tt>s in the Java heap and of native memory
* in the native heap.
*
* @author Lubomir Marinov
*/
public class ByteBufferPool
{
/**
* The <tt>Logger</tt> used by the <tt>ByteBufferPool</tt> class and its
* instances for logging output.
*/
private static final Logger logger = Logger.getLogger(ByteBufferPool.class);
/**
* The <tt>ByteBuffer</tt>s which are managed by this
* <tt>ByteBufferPool</tt>.
*/
private final List<ByteBuffer> buffers = new ArrayList<ByteBuffer>();
/**
* The indicator which determines whether this <tt>ByteBufferPool</tt> has
* been closed. Introduced to determine when <tt>ByteBuffer</tt>s are to be
* disposed of and no longer be pooled.
*/
private boolean closed = false;
/**
* Closes this <tt>ByteBufferPool</tt>, i.e. releases the resources allocated
* by this <tt>ByteBufferPool</tt> during its existence and prepares it to
* be garbage collected.
*/
public void close()
{
synchronized (buffers)
{
closed = true;
Iterator<ByteBuffer> bufferIter = buffers.iterator();
boolean loggerIsTraceEnabled = logger.isTraceEnabled();
int leakedCount = 0;
while (bufferIter.hasNext())
{
ByteBuffer buffer = bufferIter.next();
if (buffer.isFree())
{
bufferIter.remove();
FFmpeg.av_free(buffer.ptr);
}
else if (loggerIsTraceEnabled)
leakedCount++;
}
if (loggerIsTraceEnabled)
{
logger.trace(
"Leaking " + leakedCount + " ByteBuffer instances.");
}
}
}
/**
* Gets a <tt>ByteBuffer</tt> out of the pool of free <tt>ByteBuffer</tt>s
* (i.e. <tt>ByteBuffer</tt>s ready for writing captured media data into
* them) which is capable of receiving at least <tt>capacity</tt> number of
* bytes.
*
* @param capacity the minimal number of bytes that the returned
* <tt>ByteBuffer</tt> is to be capable of receiving
* @return a <tt>ByteBuffer</tt> which is ready for writing captured media
* data into and which is capable of receiving at least <tt>capacity</tt>
* number of bytes
*/
public ByteBuffer getFreeBuffer(int capacity)
{
synchronized (buffers)
{
if (closed)
return null;
int bufferCount = buffers.size();
ByteBuffer freeBuffer = null;
/*
* XXX Pad with FF_INPUT_BUFFER_PADDING_SIZE or hell will break
* loose.
*/
capacity += FFmpeg.FF_INPUT_BUFFER_PADDING_SIZE;
for (int bufferIndex = 0; bufferIndex < bufferCount; bufferIndex++)
{
ByteBuffer buffer = buffers.get(bufferIndex);
if (buffer.isFree() && (buffer.capacity >= capacity))
{
freeBuffer = buffer;
break;
}
}
if (freeBuffer == null)
{
freeBuffer = new ByteBuffer(capacity);
buffers.add(freeBuffer);
}
freeBuffer.setFree(false);
return freeBuffer;
}
}
/**
* Returns a specific <tt>ByteBuffer</tt> into the pool of free
* <tt>ByteBuffer</tt>s (i.e. <tt>ByteBuffer</tt>s ready for writing
* captured media data into them).
*
* @param buffer the <tt>ByteBuffer</tt> to be returned into the pool of
* free <tt>ByteBuffer</tt>s
*/
public void returnFreeBuffer(ByteBuffer buffer)
{
synchronized (buffers)
{
buffer.setFree(true);
if (closed && buffers.remove(buffer))
FFmpeg.av_free(buffer.ptr);
}
}
/**
* Returns a specific <tt>ByteBuffer</tt> given by the pointer to the native
* memory that it represents into the pool of free <tt>ByteBuffer</tt>s
* (i.e. <tt>ByteBuffer</tt>s ready for writing captured media data into
* them).
*
* @param bufferPtr the pointer to the native memory represented by the
* <tt>ByteBuffer</tt> to be returned into the pool of free
* <tt>ByteBuffer</tt>s
*/
public void returnFreeBuffer(long bufferPtr)
{
synchronized (buffers)
{
for (ByteBuffer buffer : buffers)
if (buffer.ptr == bufferPtr)
{
returnFreeBuffer(buffer);
break;
}
}
}
}
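A hedged sketch of the pool's intended lifecycle from the point of view of a capture stream; the capacity (one 640x480 RGB24 frame) is purely illustrative:

ByteBufferPool pool = new ByteBufferPool();

ByteBuffer buffer = pool.getFreeBuffer(640 * 480 * 3);
if (buffer != null)
{
    // ... write at most buffer.capacity bytes of captured data into the
    // native memory at buffer.ptr ...
    buffer.setLength(640 * 480 * 3);

    // Hand the data off for processing, then make the buffer reusable.
    pool.returnFreeBuffer(buffer);
}

// When the stream is closed, dispose of the pooled native memory.
pool.close();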

@ -16,8 +16,8 @@
import net.java.sip.communicator.impl.neomedia.jmfext.media.protocol.*;
/**
* DataSource for our image streaming (which is used for
* Desktop streaming).
* Implements <tt>CaptureDevice</tt> and <tt>DataSource</tt> for the purposes of
* image and desktop streaming.
*
* @author Sebastien Vincent
* @author Lubomir Marinov
@ -28,35 +28,40 @@ public class DataSource
{
/**
* Array of supported formats.
* The list of supported formats.
*/
private static final Format[] formats
= new Format[]
{
new AVFrameFormat(
Toolkit.getDefaultToolkit().getScreenSize(),
Format.NOT_SPECIFIED,
FFmpeg.PIX_FMT_ARGB),
new RGBFormat(
Toolkit.getDefaultToolkit().getScreenSize(), // size
Format.NOT_SPECIFIED, // maxDataLength
Format.byteArray, // dataType
Format.NOT_SPECIFIED, // frameRate
32, // bitsPerPixel
2, // red
3, // green
4) // blue
};
private static final Format[] FORMATS;
static
{
Dimension screenSize = Toolkit.getDefaultToolkit().getScreenSize();
FORMATS
= new Format[]
{
new AVFrameFormat(
screenSize,
Format.NOT_SPECIFIED,
FFmpeg.PIX_FMT_ARGB),
new RGBFormat(
screenSize, // size
Format.NOT_SPECIFIED, // maxDataLength
Format.byteArray, // dataType
Format.NOT_SPECIFIED, // frameRate
32, // bitsPerPixel
2 /* red */, 3 /* green */, 4 /* blue */)
};
}
/**
* Constructor.
* Initializes a new <tt>DataSource</tt> instance.
*/
public DataSource()
{
}
/**
* Constructor.
* Initializes a new <tt>DataSource</tt> instance.
*
* @param locator associated <tt>MediaLocator</tt>
*/
@ -66,7 +71,7 @@ public DataSource(MediaLocator locator)
}
/**
* Create a new <tt>PullBufferStream</tt> which is to be at a specific
* Creates a new <tt>PullBufferStream</tt> which is to be at a specific
* zero-based index in the list of streams of this
* <tt>PullBufferDataSource</tt>. The <tt>Format</tt>-related information of
* the new instance is to be abstracted by a specific
@ -90,12 +95,12 @@ protected AbstractPullBufferStream createStream(
}
/**
* Get supported formats.
* Gets the list of supported formats.
*
* @return supported formats
* @return the list of supported formats
*/
public static Format[] getFormats()
{
return formats;
return FORMATS;
}
}

@ -315,61 +315,4 @@ public byte[] readScreen(byte output[])
scaledScreen = null;
return data;
}
/**
* Represents an <tt>AVFrame</tt> used by this instance to provide captured
* media data in native format without representing the very frame data in
* the Java heap. Since this instance cannot know when the <tt>AVFrame</tt>
* instances are really safe for deallocation, <tt>FinalizableAVFrame</tt>
* relies on the Java finalization mechanism to reclaim the represented
* native memory.
*/
public class FinalizableAVFrame
extends AVFrame
{
/**
* The indicator which determines whether the native memory represented
* by this instance has already been freed/deallocated.
*/
private boolean freed = false;
/**
* Initializes a new <tt>FinalizableAVFrame</tt> instance which is to
* allocate a new native FFmpeg <tt>AVFrame</tt> and represent it.
*/
public FinalizableAVFrame()
{
super(FFmpeg.avcodec_alloc_frame());
}
/**
* Deallocates the native memory represented by this instance.
*
* @see Object#finalize()
*/
@Override
protected void finalize()
throws Throwable
{
try
{
if (!freed)
{
long ptr = getPtr();
long bufferPtr = FFmpeg.avpicture_get_data0(ptr);
if(bufferPtr != 0)
FFmpeg.av_free(bufferPtr);
FFmpeg.av_free(ptr);
freed = true;
}
}
finally
{
super.finalize();
}
}
}
}

@ -87,7 +87,7 @@ public DataSource(MediaLocator locator)
}
/**
* Create a new <tt>PushBufferStream</tt> which is to be at a specific
* Creates a new <tt>PushBufferStream</tt> which is to be at a specific
* zero-based index in the list of streams of this
* <tt>PushBufferDataSource</tt>. The <tt>Format</tt>-related information of
* the new instance is to be abstracted by a specific
@ -509,7 +509,7 @@ protected Format setFormat(
*
* @param locator the <tt>MediaLocator</tt> which specifies the media source
* of this <tt>DataSource</tt>
* @see DataSource#setLocator(MediaLocator)
* @see javax.media.protocol.DataSource#setLocator(MediaLocator)
*/
@Override
public void setLocator(MediaLocator locator)

@ -47,9 +47,9 @@ public class QuickTimeStream
/**
* The pool of <tt>ByteBuffer</tt>s this instance is using to transfer the
* media data captured by {@link #captureOutput} out of this instance
* through the <tt>Buffer</tt>s specified in its {@link #process(Buffer)}.
* through the <tt>Buffer</tt>s specified in its {@link #read(Buffer)}.
*/
private final List<ByteBuffer> buffers = new ArrayList<ByteBuffer>();
private final ByteBufferPool byteBufferPool = new ByteBufferPool();
/**
* The <tt>QTCaptureOutput</tt> represented by this <tt>SourceStream</tt>.
@ -63,13 +63,6 @@ public class QuickTimeStream
*/
private VideoFormat captureOutputFormat;
/**
* The indicator which determines whether this <tt>QuickTimeStream</tt> has
* been closed. Introduced to determine when <tt>ByteBuffer</tt>s are to be
* disposed of and no longer be pooled in {@link #buffers}.
*/
private boolean closed = false;
/**
* The captured media data to be returned in {@link #read(Buffer)}.
*/
@ -148,7 +141,6 @@ public class QuickTimeStream
automaticallyDropsLateVideoFrames
= false;//captureOutput.setAutomaticallyDropsLateVideoFrames(true);
captureOutput
.setDelegate(
new QTCaptureDecompressedVideoOutput.Delegate()
@ -204,11 +196,12 @@ private void captureOutputDidOutputVideoFrameWithSampleBuffer(
{
if (nextData != null)
{
returnFreeBuffer(nextData);
byteBufferPool.returnFreeBuffer(nextData);
nextData = null;
}
nextData = getFreeBuffer(pixelBuffer.getByteCount());
nextData
= byteBufferPool.getFreeBuffer(pixelBuffer.getByteCount());
if (nextData != null)
{
nextData.setLength(
@ -224,11 +217,11 @@ private void captureOutputDidOutputVideoFrameWithSampleBuffer(
if (data != null)
{
returnFreeBuffer(data);
byteBufferPool.returnFreeBuffer(data);
data = null;
}
data = getFreeBuffer(pixelBuffer.getByteCount());
data = byteBufferPool.getFreeBuffer(pixelBuffer.getByteCount());
if (data != null)
{
data.setLength(pixelBuffer.getBytes(data.ptr, data.capacity));
@ -238,7 +231,7 @@ private void captureOutputDidOutputVideoFrameWithSampleBuffer(
}
if (nextData != null)
{
returnFreeBuffer(nextData);
byteBufferPool.returnFreeBuffer(nextData);
nextData = null;
}
@ -274,31 +267,7 @@ public void close()
captureOutput.setDelegate(null);
synchronized (buffers)
{
closed = true;
Iterator<ByteBuffer> bufferIter = buffers.iterator();
boolean loggerIsTraceEnabled = logger.isTraceEnabled();
int leakedCount = 0;
while (bufferIter.hasNext())
{
ByteBuffer buffer = bufferIter.next();
if (buffer.isFree())
{
bufferIter.remove();
FFmpeg.av_free(buffer.ptr);
} else if (loggerIsTraceEnabled)
leakedCount++;
}
if (loggerIsTraceEnabled)
{
logger.trace(
"Leaking " + leakedCount + " ByteBuffer instances.");
}
}
byteBufferPool.close();
}
/**
@ -435,54 +404,6 @@ else if (captureOutputFormat instanceof AVFrameFormat)
return null;
}
/**
* Gets a <tt>ByteBuffer</tt> out of the pool of free <tt>ByteBuffer</tt>s
* (i.e. <tt>ByteBuffer</tt>s ready for writing captured media data into
* them) which is capable to receiving at least <tt>capacity</tt> number of
* bytes.
*
* @param capacity the minimal number of bytes that the returned
* <tt>ByteBuffer</tt> is to be capable of receiving
* @return a <tt>ByteBuffer</tt> which is ready for writing captured media
* data into and which is capable of receiving at least <tt>capacity</tt>
* number of bytes
*/
private ByteBuffer getFreeBuffer(int capacity)
{
synchronized (buffers)
{
if (closed)
return null;
int bufferCount = buffers.size();
ByteBuffer freeBuffer = null;
/*
* XXX Pad with FF_INPUT_BUFFER_PADDING_SIZE or hell will break
* loose.
*/
capacity += FFmpeg.FF_INPUT_BUFFER_PADDING_SIZE;
for (int bufferIndex = 0; bufferIndex < bufferCount; bufferIndex++)
{
ByteBuffer buffer = buffers.get(bufferIndex);
if (buffer.isFree() && (buffer.capacity >= capacity))
{
freeBuffer = buffer;
break;
}
}
if (freeBuffer == null)
{
freeBuffer = new ByteBuffer(capacity);
buffers.add(freeBuffer);
}
freeBuffer.setFree(false);
return freeBuffer;
}
}
/**
* Gets the <tt>Format</tt> of the media data made available by this
* <tt>PushBufferStream</tt> as indicated by a specific
@ -548,41 +469,11 @@ public void read(Buffer buffer)
}
if (bufferFormat instanceof AVFrameFormat)
{
Object bufferData = buffer.getData();
AVFrame bufferFrame;
long bufferFramePtr;
long bufferPtrToReturnFree;
if (bufferData instanceof AVFrame)
{
bufferFrame = (AVFrame) bufferData;
bufferFramePtr = bufferFrame.getPtr();
bufferPtrToReturnFree
= FFmpeg.avpicture_get_data0(bufferFramePtr);
}
else
{
bufferFrame = new FinalizableAVFrame();
buffer.setData(bufferFrame);
bufferFramePtr = bufferFrame.getPtr();
bufferPtrToReturnFree = 0;
}
AVFrameFormat bufferFrameFormat = (AVFrameFormat) bufferFormat;
Dimension bufferFrameSize = bufferFrameFormat.getSize();
FFmpeg.avpicture_fill(
bufferFramePtr,
data.ptr,
bufferFrameFormat.getPixFmt(),
bufferFrameSize.width, bufferFrameSize.height);
//System.err.println(
// "QuickTimeStream.read: bufferFramePtr= 0x"
// + Long.toHexString(bufferFramePtr)
// + ", data.ptr= 0x"
// + Long.toHexString(data.ptr));
if (bufferPtrToReturnFree != 0)
returnFreeBuffer(bufferPtrToReturnFree);
FinalizableAVFrame.read(
buffer,
bufferFormat,
data,
byteBufferPool);
}
else
{
@ -608,7 +499,7 @@ public void read(Buffer buffer)
buffer.setLength(dataLength);
buffer.setOffset(0);
returnFreeBuffer(data);
byteBufferPool.returnFreeBuffer(data);
}
buffer.setFlags(Buffer.FLAG_LIVE_DATA | Buffer.FLAG_SYSTEM_TIME);
@ -620,47 +511,6 @@ public void read(Buffer buffer)
}
}
/**
* Returns a specific <tt>ByteBuffer</tt> into the pool of free
* <tt>ByteBuffer</tt>s (i.e. <tt>ByteBuffer</tt>s ready for writing
* captured media data into them).
*
* @param buffer the <tt>ByteBuffer</tt> to be returned into the pool of
* free <tt>ByteBuffer</tt>s
*/
private void returnFreeBuffer(ByteBuffer buffer)
{
synchronized (buffers)
{
buffer.setFree(true);
if (closed && buffers.remove(buffer))
FFmpeg.av_free(buffer.ptr);
}
}
/**
* Returns a specific <tt>ByteBuffer</tt> given by the pointer to the native
* memory that it represents into the pool of free <tt>ByteBuffer</tt>s
* (i.e. <tt>ByteBuffer</tt>s ready for writing captured media data into
* them).
*
* @param bufferPtr the pointer to the native memory represented by the
* <tt>ByteBuffer</tt> to be returned into the pool of free
* <tt>ByteBuffer</tt>s
*/
private void returnFreeBuffer(long bufferPtr)
{
synchronized (buffers)
{
for (ByteBuffer buffer : buffers)
if (buffer.ptr == bufferPtr)
{
returnFreeBuffer(buffer);
break;
}
}
}
/**
* Calls {@link BufferTransferHandler#transferData(PushBufferStream)} from
* inside {@link #transferDataThread} so that the call is not made in
@ -683,7 +533,7 @@ private void runInTransferDataThread()
{
if (data != null)
{
returnFreeBuffer(data);
byteBufferPool.returnFreeBuffer(data);
data = null;
}
@ -863,13 +713,13 @@ public void stop()
{
if (data != null)
{
returnFreeBuffer(data);
byteBufferPool.returnFreeBuffer(data);
data = null;
}
dataFormat = null;
if (nextData != null)
{
returnFreeBuffer(nextData);
byteBufferPool.returnFreeBuffer(nextData);
nextData = null;
}
nextDataFormat = null;
@ -878,60 +728,4 @@ public void stop()
dataSyncRoot.notifyAll();
}
}
/**
* Represents an <tt>AVFrame</tt> used by this instance to provide captured
* media data in native format without representing the very frame data in
* the Java heap. Since this instance cannot know when the <tt>AVFrame</tt>
* instances are really safe for deallocation, <tt>FinalizableAVFrame</tt>
* relies on the Java finalization mechanism to reclaim the represented
* native memory.
*/
private class FinalizableAVFrame
extends AVFrame
{
/**
* The indicator which determines whether the native memory represented
* by this instance has already been freed/deallocated.
*/
private boolean freed = false;
/**
* Initializes a new <tt>FinalizableAVFrame</tt> instance which is to
* allocate a new native FFmpeg <tt>AVFrame</tt> and represent it.
*/
public FinalizableAVFrame()
{
super(FFmpeg.avcodec_alloc_frame());
}
/**
* Deallocates the native memory represented by this instance.
*
* @see Object#finalize()
*/
@Override
protected void finalize()
throws Throwable
{
try
{
if (!freed)
{
long ptr = getPtr();
long bufferPtr = FFmpeg.avpicture_get_data0(ptr);
if (bufferPtr != 0)
returnFreeBuffer(bufferPtr);
FFmpeg.av_free(ptr);
freed = true;
}
}
finally
{
super.finalize();
}
}
}
}
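The hunks above replace QuickTimeStream's private free-buffer list (and its per-class FinalizableAVFrame inner class) with a shared ByteBufferPool and a shared FinalizableAVFrame.read(...) helper. The pool class itself is not part of this diff, so the following is only a minimal sketch of the behavior suggested by the removed getFreeBuffer/returnFreeBuffer/close code; the class name ByteBufferPoolSketch and the exact padding policy are assumptions.

/*
 * Sketch only (not part of the commit): a minimal pool of ByteBuffers backed
 * by native memory, mirroring the logic removed from QuickTimeStream above.
 */
final class ByteBufferPoolSketch
{
    private final java.util.List<ByteBuffer> buffers
        = new java.util.ArrayList<ByteBuffer>();

    private boolean closed = false;

    /** Gets a free ByteBuffer able to receive at least capacity bytes. */
    synchronized ByteBuffer getFreeBuffer(int capacity)
    {
        if (closed)
            return null;
        // Assumed: pad for FFmpeg just as the removed getFreeBuffer did.
        capacity += FFmpeg.FF_INPUT_BUFFER_PADDING_SIZE;
        for (ByteBuffer buffer : buffers)
            if (buffer.isFree() && (buffer.capacity >= capacity))
            {
                buffer.setFree(false);
                return buffer;
            }
        ByteBuffer buffer = new ByteBuffer(capacity);
        buffer.setFree(false);
        buffers.add(buffer);
        return buffer;
    }

    /** Returns a ByteBuffer to the pool or frees it if the pool is closed. */
    synchronized void returnFreeBuffer(ByteBuffer buffer)
    {
        buffer.setFree(true);
        if (closed && buffers.remove(buffer))
            FFmpeg.av_free(buffer.ptr);
    }

    /** Frees all pooled native memory which is no longer in use. */
    synchronized void close()
    {
        closed = true;
        java.util.Iterator<ByteBuffer> it = buffers.iterator();
        while (it.hasNext())
        {
            ByteBuffer buffer = it.next();
            if (buffer.isFree())
            {
                it.remove();
                FFmpeg.av_free(buffer.ptr);
            }
        }
    }
}

Centralizing the pool keeps the closed flag and the av_free bookkeeping in one place instead of duplicating them in every capture stream.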

@ -0,0 +1,192 @@
/*
* SIP Communicator, the OpenSource Java VoIP and Instant Messaging client.
*
* Distributable under LGPL license.
* See terms of license at gnu.org.
*/
package net.java.sip.communicator.impl.neomedia.jmfext.media.protocol.video4linux2;
import java.io.*;
import javax.media.*;
import javax.media.control.*;
import net.java.sip.communicator.impl.neomedia.device.*;
import net.java.sip.communicator.impl.neomedia.jmfext.media.protocol.*;
/**
* Implements a <tt>PullBufferDataSource</tt> and <tt>CaptureDevice</tt> using
* the Video for Linux Two API Specification.
*
* @author Lubomir Marinov
*/
public class DataSource
extends AbstractPullBufferCaptureDevice
{
/**
* The default height to request from Video for Linux Two API Specification
* devices.
*/
public static final int DEFAULT_HEIGHT = 480;
/**
* The default width to request from Video for Linux Two API Specification
* devices.
*/
public static final int DEFAULT_WIDTH = 640;
/**
* The file descriptor of the opened Video for Linux Two API Specification
* device represented by this <tt>DataSource</tt>.
*/
private int fd = -1;
/**
* Initializes a new <tt>DataSource</tt> instance.
*/
public DataSource()
{
}
/**
* Initializes a new <tt>DataSource</tt> instance from a specific
* <tt>MediaLocator</tt>.
*
* @param locator the <tt>MediaLocator</tt> to create the new instance from
*/
public DataSource(MediaLocator locator)
{
super(locator);
}
/**
* Creates a new <tt>PullBufferStream</tt> which is to be at a specific
* zero-based index in the list of streams of this
* <tt>PullBufferDataSource</tt>. The <tt>Format</tt>-related information of
* the new instance is to be abstracted by a specific
* <tt>FormatControl</tt>.
*
* @param streamIndex the zero-based index of the <tt>PullBufferStream</tt>
* in the list of streams of this <tt>PullBufferDataSource</tt>
* @param formatControl the <tt>FormatControl</tt> which is to abstract the
* <tt>Format</tt>-related information of the new instance
* @return a new <tt>PullBufferStream</tt> which is to be at the specified
* <tt>streamIndex</tt> in the list of streams of this
* <tt>PullBufferDataSource</tt> and which has its <tt>Format</tt>-related
* information abstracted by the specified <tt>formatControl</tt>
*/
protected Video4Linux2Stream createStream(
int streamIndex,
FormatControl formatControl)
{
return new Video4Linux2Stream(formatControl);
}
/**
* Opens a connection to the media source specified by the
* <tt>MediaLocator</tt> of this <tt>DataSource</tt>.
*
* @throws IOException if anything goes wrong while opening the connection
* to the media source specified by the <tt>MediaLocator</tt> of this
* <tt>DataSource</tt>
* @see AbstractPullBufferCaptureDevice#doConnect()
*/
@Override
protected void doConnect()
throws IOException
{
super.doConnect();
String deviceName = getDeviceName();
int fd = Video4Linux2.open(deviceName, Video4Linux2.O_RDWR);
if (-1 == fd)
throw new IOException("Failed to open " + deviceName);
else
{
boolean close = true;
try
{
synchronized (this)
{
for (Object stream : getStreams())
((Video4Linux2Stream) stream).setFd(fd);
}
close = false;
}
finally
{
if (close)
{
Video4Linux2.close(fd);
fd = -1;
}
}
this.fd = fd;
}
}
/**
* Closes the connection to the media source specified by the
* <tt>MediaLocator</tt> of this <tt>DataSource</tt>.
*/
protected void doDisconnect()
{
try
{
/*
* Letting the Video4Linux2Stream know that the fd is going to be
* closed is necessary at least because
* AbstractPullBufferStream#close() is not guaranteed to be called.
*/
synchronized (this)
{
if (streams != null)
{
for (AbstractPullBufferStream stream : streams)
{
try
{
((Video4Linux2Stream) stream).setFd(-1);
}
catch (IOException ioex)
{
// The fd is being invalidated anyway, so a failure to reset it
// on an individual stream is not fatal at this point.
}
}
}
}
}
finally
{
try
{
super.doDisconnect();
}
finally
{
Video4Linux2.close(fd);
}
}
}
/**
* Gets the name of the Video for Linux Two API Specification device which
* represents the media source of this <tt>DataSource</tt>.
*
* @return the name of the Video for Linux Two API Specification device
* which represents the media source of this <tt>DataSource</tt>
*/
private String getDeviceName()
{
MediaLocator locator = getLocator();
return
((locator != null)
&& Video4Linux2Auto.LOCATOR_PROTOCOL
.equalsIgnoreCase(locator.getProtocol()))
? locator.getRemainder()
: null;
}
}
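A hedged usage sketch (not part of the commit) of how this DataSource is expected to be driven: the locator string below assumes that the Video4Linux2Auto locator protocol is literally "video4linux2" and that /dev/video0 is the device; both are illustrative assumptions.

// Sketch only; the protocol string and the device path are assumed.
MediaLocator locator = new MediaLocator("video4linux2:/dev/video0");
DataSource dataSource = new DataSource(locator);

dataSource.connect();        // doConnect(): open the device, hand the fd to the streams
try
{
    dataSource.start();
    // ... pull frames via dataSource.getStreams()[0].read(buffer) ...
    dataSource.stop();
}
finally
{
    dataSource.disconnect(); // doDisconnect(): detach the fd from the streams and close it
}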

@ -0,0 +1,188 @@
/*
* SIP Communicator, the OpenSource Java VoIP and Instant Messaging client.
*
* Distributable under LGPL license.
* See terms of license at gnu.org.
*/
package net.java.sip.communicator.impl.neomedia.jmfext.media.protocol.video4linux2;
/**
* Provides the interface to the native Video for Linux Two API Specification
* (http://v4l2spec.bytesex.org/spec/) implementation.
*
* @author Lubomir Marinov
*/
public class Video4Linux2
{
public static final int MAP_SHARED = 0x01;
public static final int O_NONBLOCK = 00004000;
public static final int O_RDWR = 00000002;
public static final int PROT_READ = 0x1;
public static final int PROT_WRITE = 0x2;
public static final int V4L2_BUF_TYPE_VIDEO_CAPTURE = 1;
public static final int V4L2_CAP_STREAMING = 0x04000000;
public static final int V4L2_CAP_VIDEO_CAPTURE = 0x00000001;
public static final int V4L2_FIELD_NONE = 1;
public static final int V4L2_MEMORY_MMAP = 1;
public static final int V4L2_MEMORY_USERPTR = 2;
public static final int V4L2_PIX_FMT_RGB24;
public static final int V4L2_PIX_FMT_UYVY;
public static final int VIDIOC_DQBUF;
public static final int VIDIOC_G_FMT;
public static final int VIDIOC_QBUF;
public static final int VIDIOC_QUERYBUF;
public static final int VIDIOC_QUERYCAP;
public static final int VIDIOC_REQBUFS;
public static final int VIDIOC_S_FMT;
public static final int VIDIOC_STREAMOFF;
public static final int VIDIOC_STREAMON;
static
{
System.loadLibrary("jvideo4linux2");
V4L2_PIX_FMT_RGB24 = V4L2_PIX_FMT_RGB24();
V4L2_PIX_FMT_UYVY = V4L2_PIX_FMT_UYVY();
VIDIOC_DQBUF = VIDIOC_DQBUF();
VIDIOC_G_FMT = VIDIOC_G_FMT();
VIDIOC_QBUF = VIDIOC_QBUF();
VIDIOC_QUERYBUF = VIDIOC_QUERYBUF();
VIDIOC_QUERYCAP = VIDIOC_QUERYCAP();
VIDIOC_REQBUFS = VIDIOC_REQBUFS();
VIDIOC_S_FMT = VIDIOC_S_FMT();
VIDIOC_STREAMOFF = VIDIOC_STREAMOFF();
VIDIOC_STREAMON = VIDIOC_STREAMON();
}
public static native int close(int fd);
public static native void free(long ptr);
public static native int ioctl(int fd, int request, long argp);
public static native long memcpy(long dest, long src, int n);
public static native long mmap(
long start,
int length,
int prot,
int flags,
int fd,
long offset);
public static native int munmap(long start, int length);
public static native int open(String deviceName, int flags);
public static native long v4l2_buffer_alloc(int type);
public static native int v4l2_buffer_getBytesused(long v4l2_buffer);
public static native int v4l2_buffer_getIndex(long v4l2_buffer);
public static native int v4l2_buffer_getLength(long v4l2_buffer);
public static native long v4l2_buffer_getMOffset(long v4l2_buffer);
public static native void v4l2_buffer_setIndex(
long v4l2_buffer,
int index);
public static native void v4l2_buffer_setMemory(
long v4l2_buffer,
int memory);
public static native long v4l2_buf_type_alloc(int type);
public static native long v4l2_capability_alloc();
public static native int v4l2_capability_getCapabilities(
long v4l2_capability);
public static native String v4l2_capability_getCard(
long v4l2_capability);
public static native long v4l2_format_alloc(int type);
public static native long v4l2_format_getFmtPix(long v4l2_format);
private static native int V4L2_PIX_FMT_RGB24();
private static native int V4L2_PIX_FMT_UYVY();
public static native int v4l2_pix_format_getHeight(
long v4l2_pix_format);
public static native int v4l2_pix_format_getPixelformat(
long v4l2_pix_format);
public static native int v4l2_pix_format_getWidth(long v4l2_pix_format);
public static native void v4l2_pix_format_setBytesperline(
long v4l2_pix_format,
int bytesperline);
public static native void v4l2_pix_format_setField(
long v4l2_pix_format,
int field);
public static native void v4l2_pix_format_setPixelformat(
long v4l2_pix_format,
int pixelformat);
public static native void v4l2_pix_format_setWidthAndHeight(
long v4l2_pix_format,
int width, int height);
public static native long v4l2_requestbuffers_alloc(int type);
public static native int v4l2_requestbuffers_getCount(
long v4l2_requestbuffers);
public static native void v4l2_requestbuffers_setCount(
long v4l2_requestbuffers,
int count);
public static native void v4l2_requestbuffers_setMemory(
long v4l2_requestbuffers,
int memory);
private static native int VIDIOC_DQBUF();
private static native int VIDIOC_G_FMT();
private static native int VIDIOC_QBUF();
private static native int VIDIOC_QUERYBUF();
private static native int VIDIOC_QUERYCAP();
private static native int VIDIOC_REQBUFS();
private static native int VIDIOC_S_FMT();
private static native int VIDIOC_STREAMOFF();
private static native int VIDIOC_STREAMON();
}
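Every method above maps one to one onto a native call in the jvideo4linux2 library loaded by the static initializer. As a hedged illustration of the alloc/ioctl/free discipline the rest of the patch follows, this is how VIDIOC_QUERYCAP could be issued through the wrapper; the method is a sketch and the device path passed to it would be an example such as /dev/video0.

// Sketch only: query the card name of a capture-capable device.
static String queryCard(String deviceName)
    throws IOException
{
    int fd = Video4Linux2.open(deviceName, Video4Linux2.O_RDWR);
    if (fd == -1)
        throw new IOException("Failed to open " + deviceName);
    try
    {
        long v4l2_capability = Video4Linux2.v4l2_capability_alloc();
        if (0 == v4l2_capability)
            throw new OutOfMemoryError("v4l2_capability_alloc");
        try
        {
            if (Video4Linux2.ioctl(
                        fd,
                        Video4Linux2.VIDIOC_QUERYCAP,
                        v4l2_capability)
                    == -1)
                throw new IOException("ioctl: request= VIDIOC_QUERYCAP");

            int capabilities
                = Video4Linux2.v4l2_capability_getCapabilities(v4l2_capability);
            if ((capabilities & Video4Linux2.V4L2_CAP_VIDEO_CAPTURE) == 0)
                throw new IOException(deviceName + " cannot capture video.");
            return Video4Linux2.v4l2_capability_getCard(v4l2_capability);
        }
        finally
        {
            Video4Linux2.free(v4l2_capability);
        }
    }
    finally
    {
        Video4Linux2.close(fd);
    }
}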

@ -0,0 +1,739 @@
/*
* SIP Communicator, the OpenSource Java VoIP and Instant Messaging client.
*
* Distributable under LGPL license.
* See terms of license at gnu.org.
*/
package net.java.sip.communicator.impl.neomedia.jmfext.media.protocol.video4linux2;
import java.awt.*;
import java.io.*;
import javax.media.*;
import javax.media.control.*;
import javax.media.format.*;
import net.java.sip.communicator.impl.neomedia.codec.video.*;
import net.java.sip.communicator.impl.neomedia.jmfext.media.protocol.*;
/**
* Implements a <tt>PullBufferStream</tt> using the Video for Linux Two API
* Specification.
*
* @author Lubomir Marinov
*/
public class Video4Linux2Stream
extends AbstractPullBufferStream
{
/**
* The pool of <tt>ByteBuffer</tt>s this instance is using to transfer the
* media data captured by the Video for Linux Two API Specification device
* out of this instance through the <tt>Buffer</tt>s specified in its
* {@link #read(Buffer)}.
*/
private final ByteBufferPool byteBufferPool = new ByteBufferPool();
/**
* The capabilities of the Video for Linux Two API Specification device
* represented by {@link #fd}.
*/
private int capabilities = 0;
/**
* The file descriptor of the Video for Linux Two API Specification device
* read through this <tt>PullBufferStream</tt>.
*/
private int fd = -1;
/**
* The last-known <tt>Format</tt> of the media data made available by this
* <tt>PullBufferStream</tt>.
*/
private Format format;
/**
* The lengths in bytes of the buffers in the application's address space
* through which the Video for Linux Two API Specification device provides
* the captured media data to this instance when
* {@link #requestbuffersMemory} is equal to <tt>V4L2_MEMORY_MMAP</tt>.
*/
private int[] mmapLengths;
/**
* The buffers through which the Video for Linux Two API Specification
* device provides the captured media data to this instance when
* {@link #requestbuffersMemory} is equal to <tt>V4L2_MEMORY_MMAP</tt>. These
* are mapped in the application's address space.
*/
private long[] mmaps;
/**
* The number of buffers through which the Video for Linux Two API
* Specification device provides the captured media data to this instance
* when {@link #requestbuffersMemory} is equal to <tt>V4L2_MEMORY_MMAP</tt>.
*/
private int requestbuffersCount = 0;
/**
* The input method negotiated by this instance with the Video for Linux Two
* API Specification device.
*/
private int requestbuffersMemory = 0;
/**
* The <tt>v4l2_buffer</tt> instance via which captured media data is
* fetched from the Video for Linux Two API Specification device to this
* instance in {@link #read(Buffer)}.
*/
private long v4l2_buffer;
/**
* Initializes a new <tt>Video4Linux2Stream</tt> instance which is to have
* its <tt>Format</tt>-related information abstracted by a specific
* <tt>FormatControl</tt>.
*
* @param formatControl the <tt>FormatControl</tt> which is to abstract the
* <tt>Format</tt>-related information of the new instance
*/
public Video4Linux2Stream(FormatControl formatControl)
{
super(formatControl);
v4l2_buffer
= Video4Linux2.v4l2_buffer_alloc(
Video4Linux2.V4L2_BUF_TYPE_VIDEO_CAPTURE);
if (0 == v4l2_buffer)
throw new OutOfMemoryError("v4l2_buffer_alloc");
Video4Linux2.v4l2_buffer_setMemory(
v4l2_buffer,
Video4Linux2.V4L2_MEMORY_MMAP);
}
/**
* Releases the resources used by this instance throughout its existence and
* makes it available for garbage collection. This instance is considered
* unusable after closing.
*
* @see AbstractPullBufferStream#close()
*/
@Override
public void close()
{
super.close();
if (v4l2_buffer != 0)
{
Video4Linux2.free(v4l2_buffer);
v4l2_buffer = 0;
}
}
/**
* Gets the <tt>Format</tt> of this <tt>PullBufferStream</tt> as directly
* known by it.
*
* @return the <tt>Format</tt> of this <tt>PullBufferStream</tt> as directly
* known by it or <tt>null</tt> if this <tt>PullBufferStream</tt> does not
* directly know its <tt>Format</tt> and it relies on the
* <tt>PullBufferDataSource</tt> which created it to report its
* <tt>Format</tt>
* @see AbstractPullBufferStream#doGetFormat()
*/
@Override
protected Format doGetFormat()
{
Format format;
if (this.format == null)
{
format = getFdFormat();
if (format == null)
format = super.doGetFormat();
else
{
VideoFormat videoFormat = (VideoFormat) format;
if (videoFormat.getSize() != null)
this.format = format;
}
}
else
format = this.format;
return format;
}
/**
* Gets the <tt>Format</tt> of the media data captured by the Video for
* Linux Two API Specification device represented by the <tt>fd</tt> of this
* instance.
*
* @return the <tt>Format</tt> of the media data captured by the Video for
* Linux Two API Specification device represented by the <tt>fd</tt> of this
* instance
*/
private Format getFdFormat()
{
Format format = null;
if (-1 != fd)
{
long v4l2_format
= Video4Linux2.v4l2_format_alloc(
Video4Linux2.V4L2_BUF_TYPE_VIDEO_CAPTURE);
if (v4l2_format == 0)
throw new OutOfMemoryError("v4l2_format_alloc");
else
{
try
{
if (Video4Linux2.ioctl(
fd,
Video4Linux2.VIDIOC_G_FMT,
v4l2_format)
!= -1)
{
long fmtPix
= Video4Linux2.v4l2_format_getFmtPix(v4l2_format);
int pixelformat
= Video4Linux2.v4l2_pix_format_getPixelformat(
fmtPix);
if (Video4Linux2.V4L2_PIX_FMT_UYVY == pixelformat)
{
int width
= Video4Linux2.v4l2_pix_format_getWidth(fmtPix);
int height
= Video4Linux2.v4l2_pix_format_getHeight(
fmtPix);
format
= new AVFrameFormat(
new Dimension(width, height),
Format.NOT_SPECIFIED,
FFmpeg.PIX_FMT_UYVY422);
}
}
}
finally
{
Video4Linux2.free(v4l2_format);
}
}
}
return format;
}
/**
* Unmaps the buffers through which the Video for Linux Two API
* Specification device provides the captured media data to this instance
* when {@link #requestbuffersMemory} is equal to <tt>V4L2_MEMORY_MMAP</tt>
* i.e. breaks the buffers' mappings between the driver's and the
* application's address spaces.
*/
private void munmap()
{
try
{
if (mmaps != null)
{
for (int i = 0; i < mmaps.length; i++)
{
long mmap = mmaps[i];
if (mmap != 0)
{
Video4Linux2.munmap(mmap, mmapLengths[i]);
mmaps[i] = 0;
mmapLengths[i] = 0;
}
}
}
}
finally
{
mmaps = null;
mmapLengths = null;
}
}
/**
* Negotiates the input method with the Video for Linux Two API
* Specification device represented by the <tt>fd</tt> of this instance.
*
* @throws IOException if anything goes wrong while negotiating the input
* method with the Video for Linux Two API Specification device represented
* by the <tt>fd</tt> of this instance
*/
private void negotiateFdInputMethod()
throws IOException
{
long v4l2_capability = Video4Linux2.v4l2_capability_alloc();
if (0 == v4l2_capability)
throw new OutOfMemoryError("v4l2_capability_alloc");
try
{
if (Video4Linux2.ioctl(
fd,
Video4Linux2.VIDIOC_QUERYCAP,
v4l2_capability)
== -1)
throw new IOException("ioctl: request= VIDIOC_QUERYCAP");
capabilities
= Video4Linux2.v4l2_capability_getCapabilities(v4l2_capability);
}
finally
{
Video4Linux2.free(v4l2_capability);
}
if ((capabilities & Video4Linux2.V4L2_CAP_STREAMING)
!= Video4Linux2.V4L2_CAP_STREAMING)
throw new IOException("Non-streaming V4L2 device not supported.");
long v4l2_requestbuffers
= Video4Linux2.v4l2_requestbuffers_alloc(
Video4Linux2.V4L2_BUF_TYPE_VIDEO_CAPTURE);
if (0 == v4l2_requestbuffers)
throw new OutOfMemoryError("v4l2_requestbuffers_alloc");
try
{
requestbuffersMemory = Video4Linux2.V4L2_MEMORY_MMAP;
Video4Linux2.v4l2_requestbuffers_setMemory(
v4l2_requestbuffers,
requestbuffersMemory);
Video4Linux2.v4l2_requestbuffers_setCount(v4l2_requestbuffers, 2);
if (Video4Linux2.ioctl(
fd,
Video4Linux2.VIDIOC_REQBUFS,
v4l2_requestbuffers)
== -1)
{
throw
new IOException(
"ioctl: request= VIDIOC_REQBUFS, memory= "
+ requestbuffersMemory);
}
requestbuffersCount
= Video4Linux2.v4l2_requestbuffers_getCount(
v4l2_requestbuffers);
}
finally
{
Video4Linux2.free(v4l2_requestbuffers);
}
if (requestbuffersCount < 1)
throw new IOException("Insufficient V4L2 device memory.");
long v4l2_buffer
= Video4Linux2.v4l2_buffer_alloc(
Video4Linux2.V4L2_BUF_TYPE_VIDEO_CAPTURE);
if (0 == v4l2_buffer)
throw new OutOfMemoryError("v4l2_buffer_alloc");
try
{
Video4Linux2.v4l2_buffer_setMemory(
v4l2_buffer,
Video4Linux2.V4L2_MEMORY_MMAP);
mmaps = new long[requestbuffersCount];
mmapLengths = new int[requestbuffersCount];
boolean munmap = true;
try
{
for (int i = 0; i < requestbuffersCount; i++)
{
Video4Linux2.v4l2_buffer_setIndex(v4l2_buffer, i);
if (Video4Linux2.ioctl(
fd,
Video4Linux2.VIDIOC_QUERYBUF,
v4l2_buffer)
== -1)
{
throw
new IOException("ioctl: request= VIDIOC_QUERYBUF");
}
int length
= Video4Linux2.v4l2_buffer_getLength(v4l2_buffer);
long offset
= Video4Linux2.v4l2_buffer_getMOffset(v4l2_buffer);
long mmap
= Video4Linux2.mmap(
0,
length,
Video4Linux2.PROT_READ
| Video4Linux2.PROT_WRITE,
Video4Linux2.MAP_SHARED,
fd,
offset);
if (-1 == mmap)
throw new IOException("mmap");
mmaps[i] = mmap;
mmapLengths[i] = length;
}
munmap = false;
}
finally
{
if (munmap)
munmap();
}
}
finally
{
Video4Linux2.free(v4l2_buffer);
}
}
/**
* Reads media data from this <tt>PullBufferStream</tt> into a specific
* <tt>Buffer</tt> with blocking.
*
* @param buffer the <tt>Buffer</tt> in which media data is to be read from
* this <tt>PullBufferStream</tt>
* @throws IOException if anything goes wrong while reading media data from
* this <tt>PullBufferStream</tt> into the specified <tt>buffer</tt>
*/
public void read(Buffer buffer)
throws IOException
{
Format format = buffer.getFormat();
if (!(format instanceof AVFrameFormat))
format = null;
if (format == null)
{
format = getFormat();
if (format != null)
buffer.setFormat(format);
}
if (Video4Linux2.ioctl(fd, Video4Linux2.VIDIOC_DQBUF, v4l2_buffer)
== -1)
throw new IOException("ioctl: request= VIDIOC_DQBUF");
try
{
int bytesused = Video4Linux2.v4l2_buffer_getBytesused(v4l2_buffer);
ByteBuffer data = byteBufferPool.getFreeBuffer(bytesused);
if (data != null)
{
int index = Video4Linux2.v4l2_buffer_getIndex(v4l2_buffer);
long mmap = mmaps[index];
Video4Linux2.memcpy(data.ptr, mmap, bytesused);
data.setLength(bytesused);
FinalizableAVFrame.read(buffer, format, data, byteBufferPool);
}
}
finally
{
if (Video4Linux2.ioctl(fd, Video4Linux2.VIDIOC_QBUF, v4l2_buffer)
== -1)
throw new IOException("ioctl: request= VIDIOC_QBUF");
}
}
/**
* Sets the file descriptor of the Video for Linux Two API Specification
* device which is to be read through this <tt>PullBufferStream</tt>.
*
* @param fd the file descriptor of the Video for Linux Two API
* Specification device which is to be read through this
* <tt>PullBufferStream</tt>
* @throws IOException if anything goes wrong while setting the file
* descriptor of the Video for Linux Two API Specification device which is
* to be read through this <tt>PullBufferStream</tt>
*/
void setFd(int fd)
throws IOException
{
if (this.fd != fd)
{
if (this.fd != -1)
{
try
{
stop();
}
catch (IOException ioex)
{
// Ignore the failure to stop: this stream is being detached
// from the old fd anyway.
}
munmap();
}
/*
* Before a Video for Linux Two API Specification device can be
* read, an attempt to set its format must be made and its cropping
* must be reset. We can only learn about the format to be set from
* formatControl. But since this AbstractPullBufferStream exists
* already, formatControl will ask it about its format. So pretend
* that there is no device prior to asking formatControl about the
* format in order to get the format that has been set by the user.
*/
this.fd = -1;
this.capabilities = 0;
this.requestbuffersMemory = 0;
this.requestbuffersCount = 0;
if (fd != -1)
{
Format format = getFormat();
this.fd = fd;
if (format != null)
setFdFormat(format);
setFdCropToDefault();
negotiateFdInputMethod();
}
}
}
/**
* Sets the crop of the Video for Linux Two API Specification device
* represented by the <tt>fd</tt> of this instance to its default value so
* that this <tt>PullBufferStream</tt> reads media data without cropping.
*/
private void setFdCropToDefault()
{
// TODO Resetting the device's cropping to its default rectangle is not implemented yet.
}
/**
* Sets the <tt>Format</tt> in which the Video for Linux Two API
* Specification device represented by the <tt>fd</tt> of this instance is
* to capture media data.
*
* @param format the <tt>Format</tt> of the media data to be captured by the
* Video for Linux Two API Specification device represented by the
* <tt>fd</tt> of this instance
* @throws IOException if anything goes wrong while setting the
* <tt>Format</tt> of the media data to be captured by the Video for Linux
* Two API Specification device represented by the <tt>fd</tt> of this
* instance
*/
private void setFdFormat(Format format)
throws IOException
{
int pixelformat = 0;
if (format instanceof AVFrameFormat)
{
int pixFmt = ((AVFrameFormat) format).getPixFmt();
if (FFmpeg.PIX_FMT_UYVY422 == pixFmt)
pixelformat = Video4Linux2.V4L2_PIX_FMT_UYVY;
}
if (pixelformat == 0)
throw new IOException("Unsupported format " + format);
long v4l2_format
= Video4Linux2.v4l2_format_alloc(
Video4Linux2.V4L2_BUF_TYPE_VIDEO_CAPTURE);
if (v4l2_format == 0)
throw new OutOfMemoryError("v4l2_format_alloc");
try
{
if (Video4Linux2.ioctl(fd, Video4Linux2.VIDIOC_G_FMT, v4l2_format)
== -1)
throw new IOException("ioctl: request= VIDIO_G_FMT");
VideoFormat videoFormat = (VideoFormat) format;
Dimension size = videoFormat.getSize();
long fmtPix = Video4Linux2.v4l2_format_getFmtPix(v4l2_format);
int width = Video4Linux2.v4l2_pix_format_getWidth(fmtPix);
int height = Video4Linux2.v4l2_pix_format_getHeight(fmtPix);
boolean setFdFormat = false;
if ((size == null)
&& ((DataSource.DEFAULT_WIDTH > width)
|| (DataSource.DEFAULT_HEIGHT > height)))
{
size
= new Dimension(
DataSource.DEFAULT_WIDTH,
DataSource.DEFAULT_HEIGHT);
}
if ((size != null)
&& ((size.width != width) || (size.height != height)))
{
Video4Linux2.v4l2_pix_format_setWidthAndHeight(
fmtPix,
size.width, size.height);
setFdFormat = true;
}
if (Video4Linux2.v4l2_pix_format_getPixelformat(fmtPix)
!= pixelformat)
{
Video4Linux2.v4l2_pix_format_setPixelformat(
fmtPix,
pixelformat);
setFdFormat = true;
}
if (setFdFormat)
setFdFormat(v4l2_format, fmtPix, size, pixelformat);
}
finally
{
Video4Linux2.free(v4l2_format);
}
}
private void setFdFormat(
long v4l2_format,
long fmtPix,
Dimension size,
int pixelformat)
throws IOException
{
Video4Linux2.v4l2_pix_format_setField(
fmtPix,
Video4Linux2.V4L2_FIELD_NONE);
Video4Linux2.v4l2_pix_format_setBytesperline(fmtPix, 0);
if (Video4Linux2.ioctl(
fd,
Video4Linux2.VIDIOC_S_FMT,
v4l2_format)
== -1)
{
throw
new IOException(
"ioctl: request= VIDIOC_S_FMT"
+ ((size == null)
? ""
: (", width= "
+ size.width
+ ", height= "
+ size.height))
+ ", pixelformat= "
+ pixelformat);
}
else if (Video4Linux2.v4l2_pix_format_getPixelformat(fmtPix)
!= pixelformat)
{
throw
new IOException(
"Failed to change the format of the V4L2 device to "
+ pixelformat);
}
}
/**
* Starts the transfer of media data from this <tt>PullBufferStream</tt>.
*
* @throws IOException if anything goes wrong while starting the transfer of
* media data from this <tt>PullBufferStream</tt>
* @see AbstractPullBufferStream#start()
*/
@Override
public void start()
throws IOException
{
super.start();
long v4l2_buffer
= Video4Linux2.v4l2_buffer_alloc(
Video4Linux2.V4L2_BUF_TYPE_VIDEO_CAPTURE);
if (0 == v4l2_buffer)
throw new OutOfMemoryError("v4l2_buffer_alloc");
try
{
Video4Linux2.v4l2_buffer_setMemory(
v4l2_buffer,
Video4Linux2.V4L2_MEMORY_MMAP);
for (int i = 0; i < requestbuffersCount; i++)
{
Video4Linux2.v4l2_buffer_setIndex(v4l2_buffer, i);
if (Video4Linux2.ioctl(
fd,
Video4Linux2.VIDIOC_QBUF,
v4l2_buffer)
== -1)
{
throw
new IOException(
"ioctl: request= VIDIOC_QBUF, index= " + i);
}
}
}
finally
{
Video4Linux2.free(v4l2_buffer);
}
long v4l2_buf_type
= Video4Linux2.v4l2_buf_type_alloc(
Video4Linux2.V4L2_BUF_TYPE_VIDEO_CAPTURE);
if (0 == v4l2_buf_type)
throw new OutOfMemoryError("v4l2_buf_type_alloc");
try
{
if (Video4Linux2.ioctl(
fd,
Video4Linux2.VIDIOC_STREAMON,
v4l2_buf_type)
== -1)
{
throw new IOException("ioctl: request= VIDIOC_STREAMON");
}
}
finally
{
Video4Linux2.free(v4l2_buf_type);
}
}
/**
* Stops the transfer of media data from this <tt>PullBufferStream</tt>.
*
* @throws IOException if anything goes wrong while stopping the transfer of
* media data from this <tt>PullBufferStream</tt>
* @see AbstractPullBufferStream#stop()
*/
@Override
public void stop()
throws IOException
{
try
{
long v4l2_buf_type
= Video4Linux2.v4l2_buf_type_alloc(
Video4Linux2.V4L2_BUF_TYPE_VIDEO_CAPTURE);
if (0 == v4l2_buf_type)
throw new OutOfMemoryError("v4l2_buf_type_alloc");
try
{
if (Video4Linux2.ioctl(
fd,
Video4Linux2.VIDIOC_STREAMOFF,
v4l2_buf_type)
== -1)
{
throw new IOException("ioctl: request= VIDIOC_STREAMOFF");
}
}
finally
{
Video4Linux2.free(v4l2_buf_type);
}
}
finally
{
super.stop();
}
}
}
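Read together, the methods above give the stream a simple lifecycle: setFd() sets the pixel format and maps the driver buffers, start() queues them and issues VIDIOC_STREAMON, each read() dequeues one buffer, copies it into a pooled ByteBuffer wrapped in an AVFrame and requeues it, and stop()/setFd(-1)/close() undo the setup. A hedged sketch of driving the stream directly follows (normally the DataSource above does this); the formatControl and fd values are assumed to be supplied by the caller.

// Sketch only; formatControl and fd are assumptions supplied by the caller.
Video4Linux2Stream stream = new Video4Linux2Stream(formatControl);

stream.setFd(fd);    // VIDIOC_S_FMT + VIDIOC_REQBUFS + VIDIOC_QUERYBUF + mmap
stream.start();      // VIDIOC_QBUF for each mapped buffer, then VIDIOC_STREAMON

Buffer buffer = new Buffer();
stream.read(buffer); // VIDIOC_DQBUF, memcpy into a pooled ByteBuffer, VIDIOC_QBUF

stream.stop();       // VIDIOC_STREAMOFF
stream.setFd(-1);    // stop if still running and munmap the driver buffers
stream.close();      // free the reusable v4l2_buffer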