On Windows, implements JNI-based replacements for the JNA-based H.264 encoder and decoder. The new implementations execute much faster, produce less garbage and use a newer ffmpeg version (which contains optimizations to at least the H.264 decoder).

Mac OS X and Linux will still use the JNA-based implementations because the respective native JNI libraries haven't been built for them.

Like the JNA-based implementations, the JNI-based ones still cause a disturbing number of garbage collections (though fewer than their predecessors); the encoder and the packetizer are the biggest offenders. These problems will be addressed in subsequent revisions because they are outside the scope of the switch from JNA to JNI.
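For context (this sketch is not part of the commit), the difference between the two binding styles looks roughly like this; the FFmpegLib interface, the FFmpegJni class and the single declared function are hypothetical, while the JNI pattern mirrors the new FFMPEG wrapper class introduced below.

// JNA style (the old approach, shown for illustration only): an interface is
// mapped onto the native library at run time via reflection and proxies,
// which is convenient but adds per-call overhead and temporary objects.
import com.sun.jna.Library;
import com.sun.jna.Native;

interface FFmpegLib extends Library // hypothetical interface name
{
    FFmpegLib INSTANCE =
        (FFmpegLib) Native.loadLibrary("ffmpeg", FFmpegLib.class);

    long avcodec_alloc_context();
}

// JNI style (the new approach): static native methods are bound to
// hand-written C stubs inside the loaded library, so calls are direct.
class FFmpegJni // hypothetical, analogous to the FFMPEG class added below
{
    static
    {
        System.loadLibrary("ffmpeg");
    }

    public static native long avcodec_alloc_context();
}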
cusax-fix
Lyubomir Marinov 17 years ago
parent 17c2558453
commit cc373b375b

Binary file not shown.

@@ -82,6 +82,16 @@ public class EncodingConfiguration
private final Map<String, Integer> encodingPreferences =
new Hashtable<String, Integer>();
/**
* The indicator which determines whether the JNI implementations of the
* H.264 encoder and decoder are to be preferred to the JNA ones. It's
* currently <tt>true</tt> for platforms which actually have JNI
* implementations and allows testing on them while still defaulting to the
* less efficient JNA ones on the platforms without JNI counterparts.
*/
private static final boolean H264_JNI =
System.getProperty("os.name").contains("Windows");
private static final String[] customCodecs =
new String[]
{
@@ -93,9 +103,11 @@ public class EncodingConfiguration
: "net.java.sip.communicator.impl.media.codec.audio.alaw.Packetizer",
FMJConditionals.FMJ_CODECS ? "net.sf.fmj.media.codec.audio.ulaw.Packetizer"
: "net.java.sip.communicator.impl.media.codec.audio.ulaw.Packetizer",
"net.java.sip.communicator.impl.media.codec.video.h264.NativeEncoder",
"net.java.sip.communicator.impl.media.codec.video.h264."
+ (H264_JNI ? "JNI" : "Native") + "Encoder",
"net.java.sip.communicator.impl.media.codec.video.h264.Packetizer",
"net.java.sip.communicator.impl.media.codec.video.h264.NativeDecoder",
"net.java.sip.communicator.impl.media.codec.video.h264."
+ (H264_JNI ? "JNI" : "Native") + "Decoder",
"net.java.sip.communicator.impl.media.codec.video.ImageScaler",
"net.java.sip.communicator.impl.media.codec.audio.speex.JavaEncoder",
"net.java.sip.communicator.impl.media.codec.audio.speex.JavaDecoder",

@@ -0,0 +1,159 @@
/*
* SIP Communicator, the OpenSource Java VoIP and Instant Messaging client.
*
* Distributable under LGPL license.
* See terms of license at gnu.org.
*/
package net.java.sip.communicator.impl.media.codec.video;
public class FFMPEG
{
public static final int CODEC_FLAG_LOOP_FILTER = 0x00000800;
public static final int CODEC_ID_H264 = 28;
public static final int FF_BUG_AUTODETECT = 1;
public static final int FF_CMP_CHROMA = 256;
public static final int FF_INPUT_BUFFER_PADDING_SIZE = 8;
public static final int FF_MB_DECISION_SIMPLE = 0;
public static final int PIX_FMT_RGB32 = 6;
public static final int PIX_FMT_YUV420P = 0;
public static native void av_free(long ptr);
public static native long av_malloc(int size);
public static native void av_register_all();
public static native long avcodec_alloc_context();
public static native long avcodec_alloc_frame();
public static native int avcodec_close(long avctx);
public static native int avcodec_decode_video(long avctx, long frame,
boolean[] got_picture, byte[] buf, int buf_size);
public static native int avcodec_encode_video(long avctx, byte[] buff,
int buf_size, long frame);
public static native long avcodec_find_decoder(int id);
public static native long avcodec_find_encoder(int id);
public static native void avcodec_init();
public static native int avcodec_open(long avctx, long codec);
public static native void avcodeccontext_add_flags(long avctx, int flags);
public static native void avcodeccontext_add_partitions(long avctx,
int partitions);
public static native int avcodeccontext_get_height(long avctx);
public static native int avcodeccontext_get_pix_fmt(long avctx);
public static native int avcodeccontext_get_width(long avctx);
public static native void avcodeccontext_set_bit_rate(long avctx,
int bit_rate);
public static native void avcodeccontext_set_bit_rate_tolerance(long avctx,
int bit_rate_tolerance);
public static native void avcodeccontext_set_crf(long avctx, float crf);
public static native void avcodeccontext_set_gop_size(long avctx,
int gop_size);
public static native void avcodeccontext_set_i_quant_factor(long avctx,
float i_quant_factor);
public static native void avcodeccontext_set_mb_decision(long avctx,
int mb_decision);
public static native void avcodeccontext_set_me_cmp(long avctx, int me_cmp);
public static native void avcodeccontext_set_me_method(long avctx,
int me_method);
public static native void avcodeccontext_set_me_range(long avctx,
int me_range);
public static native void avcodeccontext_set_me_subpel_quality(long avctx,
int me_subpel_quality);
public static native void avcodeccontext_set_pix_fmt(long avctx, int pix_fmt);
public static native void avcodeccontext_set_qcompress(long avctx,
float qcompress);
public static native void avcodeccontext_set_quantizer(long avctx,
int qmin, int qmax, int max_qdiff);
public static native void avcodeccontext_set_rc_buffer_size(long avctx,
int rc_buffer_size);
public static native void avcodeccontext_set_rc_eq(long avctx, String rc_eq);
public static native void avcodeccontext_set_rc_max_rate(long avctx,
int rc_max_rate);
public static native void avcodeccontext_set_sample_aspect_ratio(
long avctx, int num, int den);
public static native void avcodeccontext_set_scenechange_threshold(
long avctx, int scenechange_threshold);
public static native void avcodeccontext_set_size(long avctx, int width,
int height);
public static native void avcodeccontext_set_thread_count(long avctx,
int thread_count);
public static native void avcodeccontext_set_time_base(long avctx, int num,
int den);
public static native void avcodeccontext_set_workaround_bugs(long avctx,
int workaround_bugs);
public static native void avframe_set_data(long frame, long data0,
long offset1, long offset2);
public static native void avframe_set_key_frame(long frame,
boolean key_frame);
public static native void avframe_set_linesize(long frame, int linesize0,
int linesize1, int linesize2);
public static native int avpicture_fill(long picture, long ptr,
int pix_fmt, int width, int height);
public static native long avpicture_get_data0(long picture);
public static native int avpicture_get_size(int pix_fmt, int width,
int height);
public static native int img_convert(long dst, int dst_pix_fmt, long src,
int pix_fmt, int width, int height);
public static native void memcpy(int[] dst, int dst_offset, int dst_length,
long src);
public static native void memcpy(long dst, byte[] src, int src_offset,
int src_length);
static
{
System.loadLibrary("ffmpeg");
av_register_all();
avcodec_init();
}
}
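As a rough illustration of how these wrappers fit together (a sketch only, using nothing but the methods declared above; the real call sequence is in JNIDecoder below), decoding one padded H.264 access unit into RGB32 pixels could look like this:

// Sketch: decode one encoded, padded access unit and convert it to RGB32.
// Placed in the same package as FFMPEG for brevity; error handling and
// format changes are omitted.
package net.java.sip.communicator.impl.media.codec.video;

class DecodeSketch
{
    static int[] decodeToRgb(byte[] encodedFrame, int encodedFrameLen)
    {
        long codec = FFMPEG.avcodec_find_decoder(FFMPEG.CODEC_ID_H264);
        long ctx = FFMPEG.avcodec_alloc_context();
        FFMPEG.avcodec_open(ctx, codec);

        long yuvFrame = FFMPEG.avcodec_alloc_frame();
        long rgbFrame = FFMPEG.avcodec_alloc_frame();
        boolean[] gotPicture = new boolean[1];

        // encodedFrame must carry FF_INPUT_BUFFER_PADDING_SIZE zero bytes
        // after encodedFrameLen, as H264Parser guarantees below.
        FFMPEG.avcodec_decode_video(ctx, yuvFrame, gotPicture, encodedFrame,
            encodedFrameLen);

        int[] pixels = null;
        if (gotPicture[0])
        {
            int width = FFMPEG.avcodeccontext_get_width(ctx);
            int height = FFMPEG.avcodeccontext_get_height(ctx);
            int numBytes =
                FFMPEG.avpicture_get_size(FFMPEG.PIX_FMT_RGB32, width, height);

            long rgbBuffer = FFMPEG.av_malloc(numBytes);
            FFMPEG.avpicture_fill(rgbFrame, rgbBuffer, FFMPEG.PIX_FMT_RGB32,
                width, height);
            FFMPEG.img_convert(rgbFrame, FFMPEG.PIX_FMT_RGB32, yuvFrame,
                FFMPEG.avcodeccontext_get_pix_fmt(ctx), width, height);

            pixels = new int[numBytes / 4];
            FFMPEG.memcpy(pixels, 0, pixels.length,
                FFMPEG.avpicture_get_data0(rgbFrame));
            FFMPEG.av_free(rgbBuffer);
        }

        FFMPEG.avcodec_close(ctx);
        FFMPEG.av_free(ctx);
        FFMPEG.av_free(yuvFrame);
        FFMPEG.av_free(rgbFrame);
        return pixels;
    }
}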

@@ -6,6 +6,8 @@
*/
package net.java.sip.communicator.impl.media.codec.video.h264;
import java.util.*;
import javax.media.*;
import net.java.sip.communicator.util.*;
@@ -15,6 +17,7 @@
* the decoder expects it. RFC3984.
*
* @author Damian Minkov
* @author Lubomir Marinov
*/
public class H264Parser
{
@@ -30,10 +33,25 @@ public class H264Parser
private long lastTimestamp = -1;
// the result data is collected in this buffer
private byte[] encodedFrame = new byte[MAX_FRAME_SIZE];
private final byte[] encodedFrame;
// the size of the result data
private int encodedFrameLen;
private final int encodedFramePaddingSize;
public H264Parser()
{
this(0);
}
public H264Parser(int encodedFramePaddingSize)
{
this.encodedFramePaddingSize = encodedFramePaddingSize;
this.encodedFrame =
new byte[MAX_FRAME_SIZE + this.encodedFramePaddingSize];
}
/**
* New rtp packet is received. We push it to the parser to extract the data.
* @param inputBuffer the data from the rtp packet
@@ -70,11 +88,12 @@ public boolean pushRTPInput(Buffer inputBuffer)
int len = inputBuffer.getLength();
System.arraycopy(inData, inputOffset, encodedFrame, encodedFrameLen, len);
encodedFrameLen += len;
ensureEncodedFramePaddingSize();
}
else if (type == 24)
{
//else if (type == 24)
//{
//return deencapsulateSTAP(inputBuffer);
}
//}
else if (type == 28)
{
deencapsulateFU(fByte, inputBuffer);
@@ -138,6 +157,7 @@ private void deencapsulateFU (byte nal, Buffer inputBuffer)
}
System.arraycopy(buf, offset, encodedFrame, encodedFrameLen, len);
encodedFrameLen += len;
ensureEncodedFramePaddingSize();
}
/**
@@ -160,7 +180,13 @@ public int getEncodedFrameLen()
void reset()
{
encodedFrame = new byte[MAX_FRAME_SIZE];
encodedFrameLen = 0;
ensureEncodedFramePaddingSize();
}
private void ensureEncodedFramePaddingSize()
{
Arrays.fill(encodedFrame, encodedFrameLen, encodedFrameLen
+ encodedFramePaddingSize, (byte) 0);
}
}
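A hypothetical driver (not part of the commit) showing how JNIDecoder uses the parser: RTP payloads are pushed in until pushRTPInput signals a complete access unit, and the assembled, zero-padded frame is then read back.

// Hypothetical sketch of the H264Parser API as JNIDecoder drives it.
package net.java.sip.communicator.impl.media.codec.video.h264;

import javax.media.Buffer;

import net.java.sip.communicator.impl.media.codec.video.FFMPEG;

class ParserSketch
{
    private final H264Parser parser =
        new H264Parser(FFMPEG.FF_INPUT_BUFFER_PADDING_SIZE);

    void onRtpPayload(Buffer rtpPayload)
    {
        // pushRTPInput returns true once a whole frame has been assembled
        // from single NAL units and/or FU-A fragments (RFC 3984).
        if (parser.pushRTPInput(rtpPayload))
        {
            byte[] frame = parser.getEncodedFrame();    // includes the padding
            int frameLen = parser.getEncodedFrameLen(); // payload bytes only
            // ... hand frame and frameLen to the decoder ...
        }
    }
}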

@@ -0,0 +1,391 @@
/*
* SIP Communicator, the OpenSource Java VoIP and Instant Messaging client.
*
* Distributable under LGPL license.
* See terms of license at gnu.org.
*/
package net.java.sip.communicator.impl.media.codec.video.h264;
import java.awt.*;
import javax.media.*;
import javax.media.format.*;
import net.java.sip.communicator.impl.media.codec.*;
import net.java.sip.communicator.impl.media.codec.video.*;
import net.java.sip.communicator.util.*;
import net.sf.fmj.media.*;
/**
* Decodes incoming rtp data of type h264 and returns the result frames in RGB
* format.
*
* @author Damian Minkov
* @author Lubomir Marinov
*/
public class JNIDecoder
extends AbstractCodec
implements Codec
{
private final Logger logger = Logger.getLogger(JNIDecoder.class);
private static final String PLUGIN_NAME = "H.264 Decoder";
private static final int RED_MASK = 0xff0000;
private static final int GREEN_MASK = 0x00ff00;
private static final int BLUE_MASK = 0x0000ff;
private final VideoFormat[] outputFormats;
private final VideoFormat[] defaultOutputFormats = new VideoFormat[]
{ new RGBFormat() };
// The codec we will use
private long avcontext;
// The decoded data is stored in avpicture in native ffmpeg format (YUV)
private long avframe;
// Used to convert decoded data to RGB
private long frameRGB;
// The parser used to parse rtp content
private final H264Parser parser =
new H264Parser(FFMPEG.FF_INPUT_BUFFER_PADDING_SIZE);
// supported sizes by the codec
private final Dimension[] supportedSizes = new Dimension[]
{
// P720
new Dimension(720, 480),
// CIF4
new Dimension(704, 576),
// CIF
new Dimension(352, 288), new Dimension(320, 240),
// QCIF
new Dimension(176, 144),
// SQCIF
new Dimension(128, 96) };
// index of default size (output format)
private static int defaultSizeIx = 2;
// current width of video, so we can detect changes in video size
private double currentVideoWidth;
// keep track of last received sequence in order to avoid inconsistent data
private long lastReceivedSeq = -1;
// in case of inconsistent data drop all data till a marker is received
private boolean waitingForMarker = false;
private final boolean[] got_picture = new boolean[1];
/**
* Constructs new h264 decoder
*/
public JNIDecoder()
{
inputFormats = new VideoFormat[]
{ new VideoFormat(Constants.H264_RTP) };
outputFormats = new VideoFormat[supportedSizes.length];
Dimension targetVideoSize =
new Dimension(Constants.VIDEO_WIDTH, Constants.VIDEO_HEIGHT);
for (int i = 0; i < supportedSizes.length; i++)
{
Dimension size = supportedSizes[i];
if (size.equals(targetVideoSize))
defaultSizeIx = i;
outputFormats[i] =
// PIX_FMT_RGB32
new RGBFormat(size, -1, Format.intArray,
ensureFrameRate(Format.NOT_SPECIFIED), 32, RED_MASK,
GREEN_MASK, BLUE_MASK, 1, size.width, Format.FALSE,
Format.NOT_SPECIFIED);
}
currentVideoWidth = outputFormats[defaultSizeIx].getSize().getWidth();
}
protected Format[] getMatchingOutputFormats(Format in)
{
VideoFormat ivf = (VideoFormat) in;
Dimension inSize = ivf.getSize();
VideoFormat ovf;
// return the default size/currently decoder and encoder
// set to transmit/receive at this size
if (inSize == null)
{
ovf = outputFormats[defaultSizeIx];
}
else
{
ovf = null;
for (int i = 0; i < outputFormats.length; i++)
{
VideoFormat vf = outputFormats[i];
if (vf.getSize().equals(inSize))
{
ovf = vf;
break;
}
}
}
if (ovf == null)
return null;
Dimension outSize = ovf.getSize();
return new Format[]
{ new RGBFormat(outSize, -1, Format.intArray,
ensureFrameRate(ivf.getFrameRate()), 32, RED_MASK, GREEN_MASK,
BLUE_MASK, 1, outSize.width, Format.FALSE, Format.NOT_SPECIFIED) };
}
/**
* Set the data input format.
*
* @return false if the format is not supported.
*/
@Override
public Format setInputFormat(Format format)
{
if (super.setInputFormat(format) != null)
{
reset();
return format;
}
else
return null;
}
@Override
public Format setOutputFormat(Format format)
{
return super.setOutputFormat(format);
}
/**
* Init the codec instances.
*/
public synchronized void open() throws ResourceUnavailableException
{
if (opened)
return;
long avcodec = FFMPEG.avcodec_find_decoder(FFMPEG.CODEC_ID_H264);
avcontext = FFMPEG.avcodec_alloc_context();
FFMPEG.avcodeccontext_set_workaround_bugs(avcontext,
FFMPEG.FF_BUG_AUTODETECT);
if (FFMPEG.avcodec_open(avcontext, avcodec) < 0)
throw new RuntimeException("Could not open codec");
avframe = FFMPEG.avcodec_alloc_frame();
frameRGB = FFMPEG.avcodec_alloc_frame();
opened = true;
super.open();
}
@Override
public synchronized void close()
{
if (opened)
{
opened = false;
super.close();
FFMPEG.avcodec_close(avcontext);
FFMPEG.av_free(avcontext);
avcontext = 0;
FFMPEG.av_free(avframe);
avframe = 0;
}
}
public synchronized int process(Buffer inputBuffer, Buffer outputBuffer)
{
if (!checkInputBuffer(inputBuffer))
{
return BUFFER_PROCESSED_FAILED;
}
if (isEOM(inputBuffer) || !opened)
{
propagateEOM(outputBuffer);
return BUFFER_PROCESSED_OK;
}
if (inputBuffer.isDiscard())
{
inputBuffer.setDiscard(true);
reset();
return BUFFER_PROCESSED_OK;
}
if(waitingForMarker)
{
lastReceivedSeq = inputBuffer.getSequenceNumber();
if((inputBuffer.getFlags() & Buffer.FLAG_RTP_MARKER) != 0)
{
waitingForMarker = false;
outputBuffer.setDiscard(true);
return BUFFER_PROCESSED_OK;
}
else
return OUTPUT_BUFFER_NOT_FILLED;
}
if (lastReceivedSeq != -1
&& inputBuffer.getSequenceNumber() - lastReceivedSeq > 1)
{
long oldRecv = lastReceivedSeq;
lastReceivedSeq = inputBuffer.getSequenceNumber();
waitingForMarker = true;
logger.trace("DROP rtp data! " + oldRecv + "/" + lastReceivedSeq);
parser.reset();
reset();
return OUTPUT_BUFFER_NOT_FILLED;
}
else if (!parser.pushRTPInput(inputBuffer))
{
lastReceivedSeq = inputBuffer.getSequenceNumber();
return OUTPUT_BUFFER_NOT_FILLED;
}
lastReceivedSeq = inputBuffer.getSequenceNumber();
// decodes the data
got_picture[0] = false;
FFMPEG.avcodec_decode_video(avcontext, avframe, got_picture, parser
.getEncodedFrame(), parser.getEncodedFrameLen());
int avctxWidth = FFMPEG.avcodeccontext_get_width(avcontext);
if (avctxWidth != 0 && currentVideoWidth != avctxWidth)
{
currentVideoWidth = avctxWidth;
VideoFormat ivf = (VideoFormat) inputBuffer.getFormat();
VideoFormat ovf =
getVideoFormat(currentVideoWidth, ivf.getFrameRate());
if (ovf != null)
{
outputFormat = ovf;
}
}
outputBuffer.setFormat(outputFormat);
if (!got_picture[0])
{
outputBuffer.setDiscard(true);
return BUFFER_PROCESSED_OK;
}
// convert the picture in RGB Format
int avctxHeight = FFMPEG.avcodeccontext_get_height(avcontext);
int numBytes =
FFMPEG.avpicture_get_size(FFMPEG.PIX_FMT_RGB32, avctxWidth,
avctxHeight);
long buffer = FFMPEG.av_malloc(numBytes);
FFMPEG.avpicture_fill(frameRGB, buffer, FFMPEG.PIX_FMT_RGB32,
avctxWidth, avctxHeight);
// Convert the image from its native format to RGB
FFMPEG.img_convert(frameRGB, FFMPEG.PIX_FMT_RGB32, avframe, FFMPEG
.avcodeccontext_get_pix_fmt(avcontext), avctxWidth, avctxHeight);
Object outData = outputBuffer.getData();
int dataLength = numBytes / 4;
int[] data;
if ((outData instanceof int[])
&& ((int[]) outData).length >= dataLength)
data = (int[]) outData;
else
data = new int[dataLength];
FFMPEG
.memcpy(data, 0, dataLength, FFMPEG.avpicture_get_data0(frameRGB));
outputBuffer.setOffset(0);
outputBuffer.setLength(dataLength);
outputBuffer.setData(data);
FFMPEG.av_free(buffer);
return BUFFER_PROCESSED_OK;
}
public boolean checkFormat(Format format)
{
return format.getEncoding().equals(Constants.H264_RTP);
}
private VideoFormat getVideoFormat(double width, float frameRate)
{
for (int i = 0; i < outputFormats.length; i++)
{
VideoFormat vf = outputFormats[i];
Dimension size = vf.getSize();
if (size.getWidth() == width)
{
return new RGBFormat(size, -1, Format.intArray,
ensureFrameRate(frameRate), 32, RED_MASK, GREEN_MASK,
BLUE_MASK, 1, size.width, Format.FALSE,
Format.NOT_SPECIFIED);
}
}
return null;
}
private float ensureFrameRate(float frameRate)
{
return frameRate;
}
@Override
public String getName()
{
return PLUGIN_NAME;
}
@Override
public Format[] getSupportedOutputFormats(Format in)
{
// null input format
if (in == null)
{
return defaultOutputFormats;
}
// mismatch input format
if (!(in instanceof VideoFormat) || (matches(in, inputFormats) == null))
{
return new Format[0];
}
// match input format
return getMatchingOutputFormats(in);
}
/**
* Utility to perform format matching.
*/
public static Format matches(Format in, Format outs[])
{
for (int i = 0; i < outs.length; i++)
{
if (in.matches(outs[i]))
return outs[i];
}
return null;
}
}
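Outside a full JMF processing graph, the decoder follows the standard Codec lifecycle. A hypothetical, stripped-down driver (assuming RTP payloads already arrive as javax.media.Buffer objects) might look like:

import javax.media.Buffer;
import javax.media.Format;
import javax.media.format.VideoFormat;

import net.java.sip.communicator.impl.media.codec.Constants;
import net.java.sip.communicator.impl.media.codec.video.h264.JNIDecoder;

// Hypothetical driver, not part of the commit.
class DecoderSketch
{
    static void run(Iterable<Buffer> rtpPayloads) throws Exception
    {
        JNIDecoder decoder = new JNIDecoder();
        Format in = decoder.setInputFormat(new VideoFormat(Constants.H264_RTP));
        decoder.setOutputFormat(decoder.getSupportedOutputFormats(in)[0]);
        decoder.open();

        Buffer outBuffer = new Buffer();
        for (Buffer inBuffer : rtpPayloads)
        {
            outBuffer.setDiscard(false);

            // BUFFER_PROCESSED_OK with a non-discarded outBuffer means a
            // complete RGB frame (an int[] of pixels) is available.
            if (decoder.process(inBuffer, outBuffer)
                    == JNIDecoder.BUFFER_PROCESSED_OK
                && !outBuffer.isDiscard())
            {
                int[] pixels = (int[]) outBuffer.getData();
                // ... render outBuffer.getLength() ints from pixels ...
            }
        }
        decoder.close();
    }
}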

@@ -0,0 +1,343 @@
/*
* SIP Communicator, the OpenSource Java VoIP and Instant Messaging client.
*
* Distributable under LGPL license.
* See terms of license at gnu.org.
*/
package net.java.sip.communicator.impl.media.codec.video.h264;
import java.awt.*;
import javax.media.*;
import javax.media.format.*;
import net.java.sip.communicator.impl.media.codec.*;
import net.java.sip.communicator.impl.media.codec.video.*;
import net.sf.fmj.media.*;
/**
* Encodes supplied data in h264
*
* @author Damian Minkov
* @author Lubomir Marinov
*/
public class JNIEncoder
extends AbstractCodec
implements Codec
{
private static final String PLUGIN_NAME = "H.264 Encoder";
private static int DEF_WIDTH = 352;
private static int DEF_HEIGHT = 288;
private static final int INPUT_BUFFER_PADDING_SIZE = 8;
private static final Format[] defOutputFormats =
{ new VideoFormat(Constants.H264) };
private VideoFormat[] outputFormats = null;
// the frame rate we will use
private static final int TARGET_FRAME_RATE = 15;
// The codec we will use
private long avcontext;
// the encoded data is stored in avpicture
private long avframe;
// we use this buffer to supply data to encoder
private byte[] encFrameBuffer;
// the supplied data length
private int encFrameLen;
private long rawFrameBuffer;
// key frame every four seconds
private static final int IFRAME_INTERVAL = TARGET_FRAME_RATE * 4;
private int framesSinceLastIFrame = IFRAME_INTERVAL + 1;
/**
* Constructor
*/
public JNIEncoder()
{
DEF_WIDTH = Constants.VIDEO_WIDTH;
DEF_HEIGHT = Constants.VIDEO_HEIGHT;
int strideY = DEF_WIDTH;
int strideUV = strideY / 2;
int offsetU = strideY * DEF_HEIGHT;
int offsetV = offsetU + strideUV * DEF_HEIGHT / 2;
int inputYuvLength = (strideY + strideUV) * DEF_HEIGHT;
float sourceFrameRate = TARGET_FRAME_RATE;
inputFormats =
new Format[]
{ new YUVFormat(new Dimension(DEF_WIDTH, DEF_HEIGHT),
inputYuvLength + INPUT_BUFFER_PADDING_SIZE, Format.byteArray,
sourceFrameRate, YUVFormat.YUV_420, strideY, strideUV, 0,
offsetU, offsetV) };
inputFormat = null;
outputFormat = null;
}
private Format[] getMatchingOutputFormats(Format in)
{
VideoFormat videoIn = (VideoFormat) in;
Dimension inSize = videoIn.getSize();
outputFormats =
new VideoFormat[]
{ new VideoFormat(Constants.H264, inSize, Format.NOT_SPECIFIED,
Format.byteArray, videoIn.getFrameRate()) };
return outputFormats;
}
/**
* Return the list of formats supported at the output.
*/
public Format[] getSupportedOutputFormats(Format in)
{
// null input format
if (in == null)
return defOutputFormats;
// mismatch input format
if (!(in instanceof VideoFormat)
|| null == NativeDecoder.matches(in, inputFormats))
return new Format[0];
return getMatchingOutputFormats(in);
}
public Format setInputFormat(Format in)
{
// mismatch input format
if (!(in instanceof VideoFormat)
|| null == NativeDecoder.matches(in, inputFormats))
return null;
VideoFormat videoIn = (VideoFormat) in;
Dimension inSize = videoIn.getSize();
if (inSize == null)
inSize = new Dimension(DEF_WIDTH, DEF_HEIGHT);
YUVFormat yuv = (YUVFormat) videoIn;
if (yuv.getOffsetU() > yuv.getOffsetV())
return null;
int strideY = inSize.width;
int strideUV = strideY / 2;
int offsetU = strideY * inSize.height;
int offsetV = offsetU + strideUV * inSize.height / 2;
int inputYuvLength = (strideY + strideUV) * inSize.height;
float sourceFrameRate = videoIn.getFrameRate();
inputFormat =
new YUVFormat(inSize, inputYuvLength + INPUT_BUFFER_PADDING_SIZE,
Format.byteArray, sourceFrameRate, YUVFormat.YUV_420, strideY,
strideUV, 0, offsetU, offsetV);
// Return the selected inputFormat
return inputFormat;
}
public Format setOutputFormat(Format out)
{
// mismatch output format
if (!(out instanceof VideoFormat)
|| null == NativeDecoder.matches(out,
getMatchingOutputFormats(inputFormat)))
return null;
VideoFormat videoOut = (VideoFormat) out;
Dimension outSize = videoOut.getSize();
if (outSize == null)
{
Dimension inSize = ((VideoFormat) inputFormat).getSize();
if (inSize == null)
outSize = new Dimension(DEF_WIDTH, DEF_HEIGHT);
else
outSize = inSize;
}
outputFormat =
new VideoFormat(videoOut.getEncoding(), outSize, outSize.width
* outSize.height, Format.byteArray, videoOut.getFrameRate());
// Return the selected outputFormat
return outputFormat;
}
public synchronized int process(Buffer inBuffer, Buffer outBuffer)
{
if (isEOM(inBuffer))
{
propagateEOM(outBuffer);
reset();
return BUFFER_PROCESSED_OK;
}
if (inBuffer.isDiscard())
{
outBuffer.setDiscard(true);
reset();
return BUFFER_PROCESSED_OK;
}
Format inFormat = inBuffer.getFormat();
if (inFormat != inputFormat && !(inFormat.matches(inputFormat)))
{
setInputFormat(inFormat);
}
if (inBuffer.getLength() < 10)
{
outBuffer.setDiscard(true);
reset();
return BUFFER_PROCESSED_OK;
}
// copy data to avframe
FFMPEG.memcpy(rawFrameBuffer, (byte[]) inBuffer.getData(), inBuffer
.getOffset(), encFrameLen);
if (framesSinceLastIFrame >= IFRAME_INTERVAL)
{
FFMPEG.avframe_set_key_frame(avframe, true);
framesSinceLastIFrame = 0;
}
else
{
framesSinceLastIFrame++;
FFMPEG.avframe_set_key_frame(avframe, false);
}
// encode data
int encLen =
FFMPEG.avcodec_encode_video(avcontext, encFrameBuffer, encFrameLen,
avframe);
byte[] r = new byte[encLen];
System.arraycopy(encFrameBuffer, 0, r, 0, r.length);
outBuffer.setData(r);
outBuffer.setLength(r.length);
outBuffer.setOffset(0);
return BUFFER_PROCESSED_OK;
}
@Override
public synchronized void open() throws ResourceUnavailableException
{
if (opened)
return;
if (inputFormat == null)
throw new ResourceUnavailableException("No input format selected");
if (outputFormat == null)
throw new ResourceUnavailableException("No output format selected");
long avcodec = FFMPEG.avcodec_find_encoder(FFMPEG.CODEC_ID_H264);
avcontext = FFMPEG.avcodec_alloc_context();
FFMPEG.avcodeccontext_set_pix_fmt(avcontext, FFMPEG.PIX_FMT_YUV420P);
FFMPEG.avcodeccontext_set_size(avcontext, DEF_WIDTH, DEF_HEIGHT);
FFMPEG.avcodeccontext_set_qcompress(avcontext, 0.6f);
int _bitRate = 768000;
// average bit rate
FFMPEG.avcodeccontext_set_bit_rate(avcontext, _bitRate);
// so to be 1 in x264
FFMPEG.avcodeccontext_set_bit_rate_tolerance(avcontext, _bitRate);
FFMPEG.avcodeccontext_set_rc_max_rate(avcontext, _bitRate);
FFMPEG.avcodeccontext_set_sample_aspect_ratio(avcontext, 0, 0);
FFMPEG.avcodeccontext_set_thread_count(avcontext, 0);
FFMPEG.avcodeccontext_set_time_base(avcontext, 1000, 25500); // ???
FFMPEG.avcodeccontext_set_quantizer(avcontext, 10, 51, 4);
// avcontext.chromaoffset = -2;
FFMPEG.avcodeccontext_add_partitions(avcontext, 0x111);
// X264_PART_I4X4 0x001
// X264_PART_P8X8 0x010
// X264_PART_B8X8 0x100
FFMPEG.avcodeccontext_set_mb_decision(avcontext,
FFMPEG.FF_MB_DECISION_SIMPLE);
FFMPEG.avcodeccontext_set_rc_eq(avcontext, "blurCplx^(1-qComp)");
FFMPEG.avcodeccontext_add_flags(avcontext,
FFMPEG.CODEC_FLAG_LOOP_FILTER);
FFMPEG.avcodeccontext_set_me_method(avcontext, 1);
FFMPEG.avcodeccontext_set_me_subpel_quality(avcontext, 6);
FFMPEG.avcodeccontext_set_me_range(avcontext, 16);
FFMPEG.avcodeccontext_set_me_cmp(avcontext, FFMPEG.FF_CMP_CHROMA);
FFMPEG.avcodeccontext_set_scenechange_threshold(avcontext, 40);
// Constant quality mode (also known as constant ratefactor)
FFMPEG.avcodeccontext_set_crf(avcontext, 0);
FFMPEG.avcodeccontext_set_rc_buffer_size(avcontext, 10000000);
FFMPEG.avcodeccontext_set_gop_size(avcontext, IFRAME_INTERVAL);
FFMPEG.avcodeccontext_set_i_quant_factor(avcontext, 1f / 1.4f);
if (FFMPEG.avcodec_open(avcontext, avcodec) < 0)
throw new RuntimeException("Could not open codec");
encFrameLen = (DEF_WIDTH * DEF_HEIGHT * 3) / 2;
rawFrameBuffer = FFMPEG.av_malloc(encFrameLen);
avframe = FFMPEG.avcodec_alloc_frame();
int size = DEF_WIDTH * DEF_HEIGHT;
FFMPEG.avframe_set_data(avframe, rawFrameBuffer, size, size / 4);
FFMPEG.avframe_set_linesize(avframe, DEF_WIDTH, DEF_WIDTH / 2,
DEF_WIDTH / 2);
encFrameBuffer = new byte[encFrameLen];
opened = true;
super.open();
}
@Override
public synchronized void close()
{
if (opened)
{
opened = false;
super.close();
FFMPEG.avcodec_close(avcontext);
FFMPEG.av_free(avcontext);
avcontext = 0;
FFMPEG.av_free(avframe);
avframe = 0;
FFMPEG.av_free(rawFrameBuffer);
rawFrameBuffer = 0;
encFrameBuffer = null;
}
}
@Override
public String getName()
{
return PLUGIN_NAME;
}
}
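For reference, the planar YUV 4:2:0 layout the encoder expects (the same stride and offset arithmetic appears in the constructor and in setInputFormat) works out as follows for the default CIF size; this is only the existing arithmetic spelled out, not code from the commit.

// Worked example of the YUV420P input layout for 352x288 (CIF).
class Yuv420LayoutExample
{
    public static void main(String[] args)
    {
        int width = 352, height = 288;                 // CIF defaults
        int strideY = width;                           // 352 bytes per luma row
        int strideUV = strideY / 2;                    // 176 bytes per chroma row
        int offsetU = strideY * height;                // 101376: U plane follows Y
        int offsetV = offsetU + strideUV * height / 2; // 126720: V plane follows U
        int yuvLength = (strideY + strideUV) * height; // 152064 = width*height*3/2

        System.out.println(offsetU + ", " + offsetV + ", " + yuvLength);
        // The YUVFormat handed to JMF additionally reserves
        // INPUT_BUFFER_PADDING_SIZE (8) bytes at the end of the byte[] frame.
    }
}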