Commit c675c3b7 authored by Simon Morlat

improvements in media codec support.

This fixes the following (at least):
- encoder is instantiated first, which is better because some devices can't instantiate a hardware encoder and a hardware decoder at the same time; the second one is then forced to fall back to a software implementation
- encoder is reset() upon device rotation, because some encoders don't accept a reconfiguration otherwise
- decoder is reset() only if flush() doesn't work
- fixes the missing use of the 'offset' field of the MediaCodec BufferInfo provided by the encoder, which could lead to outputting a corrupted H264 bitstream (see the sketch after this message)
- use getInputBuffer() and getOutputBuffer() instead of deprecated getInputBuffers() and getOutputBuffers()

Lots of cleanups and clarification, and new debug logs.
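Below is a minimal sketch (not part of the commit) of the corrected encoder output path once the 'offset' field is honoured. It only reuses calls that appear in the diff (AMediaCodec_dequeueOutputBuffer, AMediaCodec_getOutputBuffer, ms_h264_bitstream_to_nalus, AMediaCodec_releaseOutputBuffer); the 'codec', 'info' and TIMEOUT_US names are assumed to match the encoder's process function.

/* Sketch: drain one encoder output buffer, reading from buf + info.offset instead
 * of buf, so that no leading garbage ends up in the H264 bitstream. Assumes
 * 'codec' (AMediaCodec*), 'info' (AMediaCodecBufferInfo) and TIMEOUT_US as in
 * the encoder's process function. */
int obufidx = AMediaCodec_dequeueOutputBuffer(codec, &info, TIMEOUT_US);
if (obufidx >= 0) {
	size_t bufsize;
	uint8_t *buf = AMediaCodec_getOutputBuffer(codec, obufidx, &bufsize);
	if (buf != NULL) {
		MSQueue nalus;
		ms_queue_init(&nalus);
		/* honour the offset reported by MediaCodec's BufferInfo */
		ms_h264_bitstream_to_nalus(buf + info.offset, info.size, &nalus);
		/* ... packetize and send the NALUs ... */
	}
	AMediaCodec_releaseOutputBuffer(codec, obufidx, FALSE);
}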
parent a65e4fb4
......@@ -35,9 +35,12 @@ extern "C"{
typedef enum{
Rfc3984FrameAvailable = 1,
Rfc3984FrameCorrupted = 1<<1,
Rfc3984IsKeyFrame = 1<<2,
Rfc3984IsKeyFrame = 1<<2, /*set when a frame has SPS + PPS or IDR (possibly both)*/
Rfc3984NewSPS = 1<<3,
Rfc3984NewPPS = 1<<4
Rfc3984NewPPS = 1<<4,
Rfc3984HasSPS = 1<<5,
Rfc3984HasPPS = 1<<6,
Rfc3984HasIDR = 1<<7,
}Rfc3984Status;
typedef struct Rfc3984Context{
......
......@@ -41,8 +41,8 @@ struct AMediaCodec {
jmethodID release;
jmethodID flush;
jmethodID stop;
jmethodID getInputBuffers;
jmethodID getOutputBuffers;
jmethodID getInputBuffer;
jmethodID getOutputBuffer;
jmethodID dequeueInputBuffer;
jmethodID queueInputBuffer;
jmethodID dequeueOutputBuffer;
......@@ -156,8 +156,8 @@ bool AMediaCodec_loadMethodID(const char *createName, AMediaCodec *codec, const
success &= _getMethodID(env, mediaCodecClass, "release", "()V", &(codec->release));
success &= _getMethodID(env, mediaCodecClass, "flush", "()V", &(codec->flush));
success &= _getMethodID(env, mediaCodecClass, "stop", "()V", &(codec->stop));
success &= _getMethodID(env, mediaCodecClass, "getInputBuffers", "()[Ljava/nio/ByteBuffer;", &(codec->getInputBuffers));
success &= _getMethodID(env, mediaCodecClass, "getOutputBuffers","()[Ljava/nio/ByteBuffer;", &(codec->getOutputBuffers));
success &= _getMethodID(env, mediaCodecClass, "getInputBuffer", "(I)Ljava/nio/ByteBuffer;", &(codec->getInputBuffer));
success &= _getMethodID(env, mediaCodecClass, "getOutputBuffer","(I)Ljava/nio/ByteBuffer;", &(codec->getOutputBuffer));
success &= _getMethodID(env, mediaCodecClass, "dequeueInputBuffer", "(J)I", &(codec->dequeueInputBuffer));
success &= _getMethodID(env, mediaCodecClass, "queueInputBuffer", "(IIIJI)V", &(codec->queueInputBuffer));
success &= _getMethodID(env, mediaCodecClass, "dequeueOutputBuffer", "(Landroid/media/MediaCodec$BufferInfo;J)I", &(codec->dequeueOutputBuffer));
......@@ -269,9 +269,8 @@ media_status_t AMediaCodec_flush(AMediaCodec *codec) {
JNIEnv *env = ms_get_jni_env();
env->CallVoidMethod(codec->jcodec, codec->flush);
handle_java_exception();
return AMEDIA_OK;
return (handle_java_exception() == -1) ? AMEDIA_ERROR_BASE : AMEDIA_OK;
}
media_status_t AMediaCodec_stop(AMediaCodec *codec) {
......@@ -283,97 +282,39 @@ media_status_t AMediaCodec_stop(AMediaCodec *codec) {
}
//API 21
/*uint8_t* AMediaCodec_getInputBuffer(AMediaCodec *codec, size_t idx, size_t *out_size){
JNIEnv *env = ms_get_jni_env();
jobject jbuffer;
uint8_t *buf;
jclass mediaCodecClass = env->FindClass("android/media/MediaCodec");
jmethodID jmethodID = env->GetMethodID(mediaCodecClass,"getInputBuffer","(I)Ljava/nio/ByteBuffer;");
if (jmethodID != NULL){
jbuffer = env->CallObjectMethod(codec->jcodec,jmethodID,(int)idx);
if(jbuffer == NULL){
return NULL;
}
buf = (uint8_t *) env->GetDirectBufferAddress(jbuffer);
if (env->ExceptionCheck()) {
env->ExceptionDescribe();
env->ExceptionClear();
ms_error("Exception");
}
} else {
ms_error("getInputBuffer() not found in class mediacodec !");
env->ExceptionClear(); //very important.
return NULL;
}
env->DeleteLocalRef(mediaCodecClass);
return buf;
}*/
//API 19
uint8_t* AMediaCodec_getInputBuffer(AMediaCodec *codec, size_t idx, size_t *out_size) {
JNIEnv *env = ms_get_jni_env();
jobject object = NULL;
jobject jbuf = NULL;
uint8_t *buf = NULL;
object = env->CallObjectMethod(codec->jcodec, codec->getInputBuffers);
if(object != NULL){
jobjectArray jbuffers = reinterpret_cast<jobjectArray>(object);
jobject jbuf = env->GetObjectArrayElement(jbuffers,idx);
jbuf = env->CallObjectMethod(codec->jcodec, codec->getInputBuffer, (jint) idx);
if(jbuf != NULL){
jlong capacity = env->GetDirectBufferCapacity(jbuf);
*out_size = (size_t) capacity;
buf = (uint8_t *) env->GetDirectBufferAddress(jbuf);
env->DeleteLocalRef(jbuf);
env->DeleteLocalRef(object);
} else {
ms_error("getInputBuffers() not found in class mediacodec !");
ms_error("getInputBuffer() failed !");
env->ExceptionClear();
}
handle_java_exception();
return buf;
}
/*
uint8_t* AMediaCodec_getOutputBuffer(AMediaCodec *codec, size_t idx, size_t *out_size){
JNIEnv *env = ms_get_jni_env();
jobject jbuffer;
uint8_t *buf;
jclass mediaCodecClass = env->FindClass("android/media/MediaCodec");
jmethodID jmethodID = env->GetMethodID(mediaCodecClass,"getOutputBuffer","(I)Ljava/nio/ByteBuffer;");
if (jmethodID != NULL){
jbuffer = env->CallObjectMethod(codec->jcodec,jmethodID,(int)idx);
if(jbuffer == NULL){
return NULL;
}
buf = (uint8_t *) env->GetDirectBufferAddress(jbuffer);
if (env->ExceptionCheck()) {
env->ExceptionDescribe();
env->ExceptionClear();
ms_error("Exception");
}
} else {
ms_error("getOutputBuffer() not found in class mediacodec !");
env->ExceptionClear(); //very important.
return NULL;
}
env->DeleteLocalRef(mediaCodecClass);
return buf;
}*/
uint8_t* AMediaCodec_getOutputBuffer(AMediaCodec *codec, size_t idx, size_t *out_size) {
JNIEnv *env = ms_get_jni_env();
jobject object = NULL;
jobject jbuf = NULL;
uint8_t *buf = NULL;
jlong capacity;
object = env->CallObjectMethod(codec->jcodec, codec->getOutputBuffers);
if(object != NULL){
jobjectArray jbuffers = reinterpret_cast<jobjectArray>(object);
jobject jbuf = env->GetObjectArrayElement(jbuffers, idx);
jbuf = env->CallObjectMethod(codec->jcodec, codec->getOutputBuffer, (jint) idx);
if (jbuf != NULL){
buf = (uint8_t *) env->GetDirectBufferAddress(jbuf);
capacity = env->GetDirectBufferCapacity(jbuf);
*out_size = (size_t) capacity;
env->DeleteLocalRef(jbuf);
env->DeleteLocalRef(object);
} else {
ms_error("getOutputBuffer() not found in class mediacodec !");
ms_error("getOutputBuffer() failed !");
env->ExceptionClear();
}
handle_java_exception();
......@@ -475,11 +416,11 @@ void AMediaCodec_setParams(AMediaCodec *codec, const char *params) {
return;
}
jstring msg = env->NewStringUTF("request-sync");
jstring msg = env->NewStringUTF(params);
jbundle = env->NewObject(BundleClass, codec->_init_BundleClass);
env->CallVoidMethod(jbundle, codec->putIntId, msg, 0);
env->CallVoidMethod(jbundle, codec->putIntId, msg, 0);
handle_java_exception();
env->DeleteLocalRef(msg);
env->DeleteLocalRef(msg);
env->CallVoidMethod(codec->jcodec, codec->setParameters, jbundle);
handle_java_exception();
......
......@@ -42,19 +42,27 @@ typedef struct _DecData {
uint8_t *bitstream;
int bitstream_size;
MSYuvBufAllocator *buf_allocator;
bool_t first_buffer_queued;
bool_t buffer_queued;
bool_t first_image_decoded;
bool_t avpf_enabled;
bool_t first_i_frame_queued;
bool_t need_key_frame;
bool_t freeze_on_error;
bool_t useMediaImage;
} DecData;
static int dec_init_format(DecData *d) {
static int dec_init_mediacodec(DecData *d) {
AMediaFormat *format;
media_status_t status = 0;
if (d->codec == NULL){
d->codec = AMediaCodec_createDecoderByType("video/avc");
if (d->codec == NULL){
ms_error("MSMediaCodecH264Dec: could not create MediaCodec");
return AMEDIA_ERROR_UNKNOWN;
}
}
format = AMediaFormat_new();
AMediaFormat_setString(format, "mime", "video/avc");
//Size mandatory for decoder configuration
......@@ -72,6 +80,7 @@ static int dec_init_format(DecData *d) {
ms_error("MSMediaCodecH264Dec: starting failure: %i", (int)status);
goto end;
}
d->need_key_frame = TRUE;
end:
AMediaFormat_delete(format);
......@@ -79,12 +88,11 @@ end:
}
static void dec_init(MSFilter *f) {
AMediaCodec *codec = AMediaCodec_createDecoderByType("video/avc");
DecData *d = ms_new0(DecData, 1);
ms_message("MSMediaCodecH264Dec initialization");
f->data = d;
d->codec = codec;
d->codec = NULL;
d->sps = NULL;
d->pps = NULL;
rfc3984_init(&d->unpacker);
......@@ -97,43 +105,23 @@ static void dec_init(MSFilter *f) {
d->bitstream = ms_malloc0(d->bitstream_size);
d->buf_allocator = ms_yuv_buf_allocator_new();
ms_average_fps_init(&d->fps, " H264 decoder: FPS: %f");
if (dec_init_format(d) != 0) {
AMediaCodec_delete(d->codec);
d->codec = NULL;
}
}
static void dec_preprocess(MSFilter *f) {
DecData *s = (DecData *)f->data;
s->first_image_decoded = FALSE;
DecData *d = (DecData *)f->data;
d->first_image_decoded = FALSE;
/*we shall allocate the MediaCodec decoder as late as possible, and after the encoder, because
* on some phones the hardware encoder and decoder can't be allocated at the same time.
* So let's give preference to the encoder.
**/
if (d->codec == NULL) dec_init_mediacodec(d);
}
static void dec_postprocess(MSFilter *f) {
}
#if 0
static void dec_reinit(DecData *d) {
AMediaFormat *format;
AMediaCodec_flush(d->codec);
AMediaCodec_stop(d->codec);
AMediaCodec_delete(d->codec);
ms_message("Restart dec");
d->codec = AMediaCodec_createDecoderByType("video/avc");
format = AMediaFormat_new();
AMediaFormat_setString(format, "mime", "video/avc");
//Size mandatory for decoder configuration
AMediaFormat_setInt32(format, "width", 1920);
AMediaFormat_setInt32(format, "height", 1080);
AMediaCodec_configure(d->codec, format, NULL, NULL, 0);
AMediaCodec_start(d->codec);
AMediaFormat_delete(format);
}
#endif
static void dec_uninit(MSFilter *f) {
DecData *d = (DecData *)f->data;
rfc3984_uninit(&d->unpacker);
......@@ -273,20 +261,17 @@ static int nalusToFrame(DecData *d, MSQueue *naluq, bool_t *new_sps_pps) {
freemsg(im);
}
return dst - d->bitstream;
}
static void handle_decoding_error(DecData *d, bool_t request_reset, bool_t *request_pli) {
if (request_reset) {
static void dec_flush(DecData *d, bool_t with_reset){
if (with_reset || (AMediaCodec_flush(d->codec) != 0)){
AMediaCodec_reset(d->codec);
d->first_buffer_queued = FALSE;
d->first_i_frame_queued = FALSE;
dec_init_format(d);
dec_init_mediacodec(d);
}
if (request_pli != NULL) *request_pli = TRUE;
};
d->need_key_frame = TRUE;
d->buffer_queued = FALSE;
}
static void dec_process(MSFilter *f) {
DecData *d = (DecData *)f->data;
......@@ -306,10 +291,10 @@ static void dec_process(MSFilter *f) {
}
if (d->packet_num == 0 && d->sps && d->pps) {
rfc3984_unpack_out_of_band_sps_pps(&d->unpacker, d->sps, d->pps);
d->sps = NULL;
d->pps = NULL;
}
rfc3984_unpack_out_of_band_sps_pps(&d->unpacker, d->sps, d->pps);
d->sps = NULL;
d->pps = NULL;
}
ms_queue_init(&nalus);
......@@ -327,12 +312,12 @@ static void dec_process(MSFilter *f) {
request_pli = TRUE;
if (d->freeze_on_error){
ms_queue_flush(&nalus);
d->first_i_frame_queued = FALSE;
d->need_key_frame = TRUE;
continue;
}
}
if (!d->first_i_frame_queued && !(unpacking_ret & Rfc3984IsKeyFrame)) {
if (d->need_key_frame && !(unpacking_ret & Rfc3984IsKeyFrame)) {
request_pli = TRUE;
ms_queue_flush(&nalus);
continue;
......@@ -343,10 +328,9 @@ static void dec_process(MSFilter *f) {
size = nalusToFrame(d, &nalus, &need_reinit);
if (need_reinit) {
//In case of rotation, the decoder needs to flushed in order to restart with the new video size
ms_message("MSMediaCodecH264Dec: video size has changed. Flushing all MediaCodec's buffers");
AMediaCodec_flush(d->codec);
d->first_buffer_queued = FALSE;
//In case of remote rotation, the decoder needs to be flushed in order to restart with the new video size
ms_message("MSMediaCodecH264Dec: SPS/PPS have changed. Flushing all MediaCodec's buffers");
dec_flush(d, TRUE);
}
/*First put our H264 bitstream into the decoder*/
......@@ -357,43 +341,58 @@ static void dec_process(MSFilter *f) {
if (buf == NULL) {
ms_error("MSMediaCodecH264Dec: AMediaCodec_getInputBuffer() returned NULL");
handle_decoding_error(d, TRUE, &request_pli);
break;
continue;
}
struct timespec ts;
clock_gettime(CLOCK_MONOTONIC, &ts);
if ((size_t)size > bufsize) {
ms_error("Cannot copy the bitstream into the input buffer size : %i and bufsize %i", size, (int) bufsize);
handle_decoding_error(d, TRUE, &request_pli);
break;
} else {
struct timespec ts;
clock_gettime(CLOCK_MONOTONIC, &ts);
memcpy(buf, d->bitstream, (size_t)size);
ms_error("Cannot copy the all the bitstream into the input buffer size : %i and bufsize %i", size, (int) bufsize);
size = MIN((size_t)size, bufsize);
}
memcpy(buf, d->bitstream, size);
if (!d->first_i_frame_queued) ms_message("MSMediaCodecH264Dec: passing first I-frame to the decoder");
if (d->need_key_frame){
ms_message("MSMediaCodecH264Dec: fresh I-frame submitted to the decoder");
d->need_key_frame = FALSE;
}
AMediaCodec_queueInputBuffer(d->codec, iBufidx, 0, (size_t)size, (ts.tv_nsec / 1000) + 10000LL, 0);
d->first_buffer_queued = TRUE;
d->first_i_frame_queued = TRUE;
if (AMediaCodec_queueInputBuffer(d->codec, iBufidx, 0, (size_t)size, (ts.tv_nsec / 1000) + 10000LL, 0) == 0){
if (!d->buffer_queued) d->buffer_queued = TRUE;
}else{
ms_error("MSMediaCodecH264Dec: AMediaCodec_queueInputBuffer() had an exception");
dec_flush(d, FALSE);
request_pli = TRUE;
continue;
}
} else if (iBufidx == AMEDIA_ERROR_UNKNOWN) {
} else if (iBufidx == -1){
ms_error("MSMediaCodecH264Dec: no buffer available for queuing this frame ! Decoder is too slow.");
/*
* This is a problematic case because we can't wait for the decoder to be ready, otherwise we would freeze the entire
* video MSTicker thread.
* We have no other option than to drop the frame and retry later, with an I-frame of course.
**/
request_pli = TRUE;
d->need_key_frame = TRUE;
continue;
}else {
ms_error("MSMediaCodecH264Dec: AMediaCodec_dequeueInputBuffer() had an exception");
handle_decoding_error(d, TRUE, &request_pli);
dec_flush(d, FALSE);
request_pli = TRUE;
continue;
}
d->packet_num++;
}
/*secondly try to get decoded frames from the decoder, this is performed every tick*/
while (d->first_buffer_queued && (oBufidx = AMediaCodec_dequeueOutputBuffer(d->codec, &info, TIMEOUT_US)) >= 0) {
while (d->buffer_queued && (oBufidx = AMediaCodec_dequeueOutputBuffer(d->codec, &info, TIMEOUT_US)) >= 0) {
AMediaFormat *format;
int width = 0, height = 0, color = 0;
uint8_t *buf = AMediaCodec_getOutputBuffer(d->codec, oBufidx, &bufsize);
if (buf == NULL) {
ms_filter_notify_no_arg(f, MS_VIDEO_DECODER_DECODING_ERRORS);
ms_error("MSMediaCodecH264Dec: AMediaCodec_getOutputBuffer() returned NULL");
handle_decoding_error(d, FALSE, &request_pli);
continue;
}
format = AMediaCodec_getOutputFormat(d->codec);
......@@ -408,52 +407,50 @@ static void dec_process(MSFilter *f) {
AMediaFormat_delete(format);
}
if (buf != NULL && d->sps && d->pps) { /*some decoders output garbage while no sps or pps have been received yet !*/
if (width != 0 && height != 0) {
if (d->useMediaImage) {
AMediaImage image;
int dst_pix_strides[4] = {1, 1, 1, 1};
MSRect dst_roi = {0, 0, pic.w, pic.h};
if (AMediaCodec_getOutputImage(d->codec, oBufidx, &image)) {
om = ms_yuv_buf_allocator_get(d->buf_allocator, &pic, width, height);
ms_yuv_buf_copy_with_pix_strides(image.buffers, image.row_strides, image.pixel_strides, image.crop_rect,
pic.planes, pic.strides, dst_pix_strides, dst_roi);
AMediaImage_close(&image);
}
} else {
if (color == 19) {
//YUV
int ysize = width * height;
int usize = ysize / 4;
om = ms_yuv_buf_allocator_get(d->buf_allocator, &pic, width, height);
memcpy(pic.planes[0], buf, ysize);
memcpy(pic.planes[1], buf + ysize, usize);
memcpy(pic.planes[2], buf + ysize + usize, usize);
} else {
uint8_t *cbcr_src = (uint8_t *)(buf + width * height);
om = copy_ycbcrbiplanar_to_true_yuv_with_rotation_and_down_scale_by_2(d->buf_allocator, buf, cbcr_src, 0, width, height, width, width, TRUE, FALSE);
}
if (width != 0 && height != 0) {
if (d->useMediaImage) {
AMediaImage image;
int dst_pix_strides[4] = {1, 1, 1, 1};
MSRect dst_roi = {0, 0, pic.w, pic.h};
if (AMediaCodec_getOutputImage(d->codec, oBufidx, &image)) {
om = ms_yuv_buf_allocator_get(d->buf_allocator, &pic, width, height);
ms_yuv_buf_copy_with_pix_strides(image.buffers, image.row_strides, image.pixel_strides, image.crop_rect,
pic.planes, pic.strides, dst_pix_strides, dst_roi);
AMediaImage_close(&image);
}
if (!d->first_image_decoded) {
ms_message("First frame decoded %ix%i", width, height);
d->first_image_decoded = TRUE;
ms_filter_notify_no_arg(f, MS_VIDEO_DECODER_FIRST_IMAGE_DECODED);
} else {
if (color == 19) {
//YUV
int ysize = width * height;
int usize = ysize / 4;
om = ms_yuv_buf_allocator_get(d->buf_allocator, &pic, width, height);
memcpy(pic.planes[0], buf, ysize);
memcpy(pic.planes[1], buf + ysize, usize);
memcpy(pic.planes[2], buf + ysize + usize, usize);
} else {
uint8_t *cbcr_src = (uint8_t *)(buf + width * height);
om = copy_ycbcrbiplanar_to_true_yuv_with_rotation_and_down_scale_by_2(d->buf_allocator, buf, cbcr_src, 0, width, height, width, width, TRUE, FALSE);
}
}
ms_queue_put(f->outputs[0], om);
} else {
ms_error("MSMediaCodecH264Dec: width and height are not known !");
handle_decoding_error(d, FALSE, &request_pli);
if (!d->first_image_decoded) {
ms_message("First frame decoded %ix%i", width, height);
d->first_image_decoded = TRUE;
ms_filter_notify_no_arg(f, MS_VIDEO_DECODER_FIRST_IMAGE_DECODED);
}
ms_queue_put(f->outputs[0], om);
} else {
ms_error("MSMediaCodecH264Dec: width and height are not known !");
}
AMediaCodec_releaseOutputBuffer(d->codec, oBufidx, FALSE);
}
if (oBufidx == AMEDIA_ERROR_UNKNOWN) {
ms_error("MSMediaCodecH264Dec: AMediaCodec_dequeueOutputBuffer() had an exception");
handle_decoding_error(d, FALSE, &request_pli);
dec_flush(d, FALSE);
request_pli = TRUE;
}
if (d->avpf_enabled && request_pli) {
......
......@@ -78,6 +78,17 @@ static void set_mblk(mblk_t **packet, mblk_t *newone) {
*packet = newone;
}
static int alloc_encoder(EncData *d){
if (!d->codec){
d->codec = AMediaCodec_createEncoderByType("video/avc");
if (!d->codec) {
ms_error("MSMediaCodecH264Enc: could not create MediaCodec");
return AMEDIA_ERROR_UNKNOWN;
}
}
return 0;
}
static void enc_init(MSFilter *f) {
MSVideoSize vsize;
EncData *d = ms_new0(EncData, 1);
......@@ -92,6 +103,10 @@ static void enc_init(MSFilter *f) {
MS_VIDEO_SIZE_ASSIGN(vsize, CIF);
d->vconf = ms_video_find_best_configuration_for_size(d->vconf_list, vsize, ms_factory_get_cpu_count(f->factory));
d->codec_started = FALSE;
/*we shall allocate the MediaCodec encoder as soon as possible, and before the decoder, because
* on some phones the hardware encoder and decoder can't be allocated at the same time.
* */
alloc_encoder(d);
f->data = d;
}
......@@ -109,13 +124,9 @@ static int enc_configure(EncData *d){
media_status_t status = AMEDIA_ERROR_UNSUPPORTED;
AMediaFormat *format;
if (!d->codec){
d->codec = AMediaCodec_createEncoderByType("video/avc");
if (!d->codec) {
ms_error("MSMediaCodecH264Enc: could not create MediaCodec");
return AMEDIA_ERROR_UNKNOWN;
}
}
status = alloc_encoder(d);
if (status != 0) return status;
d->codec_lost = FALSE;
d->codec_started = FALSE;
format = AMediaFormat_new();
......@@ -172,9 +183,7 @@ static void enc_preprocess(MSFilter *f) {
rfc3984_set_mode(d->packer, d->mode);
rfc3984_enable_stap_a(d->packer, FALSE);
ms_video_starter_init(&d->starter);
ms_iframe_requests_limiter_init(&d->iframe_limiter, 1000);
ms_iframe_requests_limiter_init(&d->iframe_limiter, 1000);
}
static void enc_postprocess(MSFilter *f) {
......@@ -187,8 +196,11 @@ static void enc_postprocess(MSFilter *f) {
if (d->codec) {
if (d->codec_started){
//AMediaCodec_flush(d->codec);
AMediaCodec_flush(d->codec);
AMediaCodec_stop(d->codec);
//It is preferable to reset the encoder, otherwise it may not accept a new configuration when coming back to preprocess().
//This was observed at least on the Moto G2, with the Qualcomm encoder.
AMediaCodec_reset(d->codec);
d->codec_started = FALSE;
}
}
......@@ -238,7 +250,8 @@ static void enc_process(MSFilter *f) {
if (ms_iframe_requests_limiter_iframe_requested(&d->iframe_limiter, f->ticker->time) ||
(d->avpf_enabled == FALSE && ms_video_starter_need_i_frame(&d->starter, f->ticker->time))) {
/*Force a key-frame*/
AMediaCodec_setParams(d->codec, "");
AMediaCodec_setParams(d->codec, "request-sync");
ms_error("MSMediaCodecH264Enc: I-frame requested to MediaCodec");
ms_iframe_requests_limiter_notify_iframe_sent(&d->iframe_limiter, f->ticker->time);
}
......@@ -308,7 +321,7 @@ static void enc_process(MSFilter *f) {
MSQueue nalus;
ms_queue_init(&nalus);
ms_h264_bitstream_to_nalus(buf, info.size, &nalus);
ms_h264_bitstream_to_nalus(buf + info.offset, info.size, &nalus);
if (!ms_queue_empty(&nalus)) {
m = ms_queue_peek_first(&nalus);
......
......@@ -281,6 +281,14 @@ static unsigned int output_frame(Rfc3984Context * ctx, MSQueue *out, unsigned in
ctx->pps = NULL;
}
/* Log some bizarre things */
if ((res & Rfc3984FrameCorrupted) == 0){
if ((res & Rfc3984HasSPS) && (res & Rfc3984HasPPS) && !(res & Rfc3984HasIDR)){
/*some decoders may not be happy with this*/
ms_warning("rfc3984_unpack: a frame with SPS+PPS but no IDR was output.");