Commit 6677a228 authored by François Grisez's avatar François Grisez

Reworking of MediaCodec H264 codec filter

Use the getInputImage() and getOutputImage() methods to pass raw pictures to and
from MediaCodec. These methods are available since Android API level 21 (Android 5.0).

Phones running older versions of Android keep using the legacy code based on the
getInputBuffer()/getOutputBuffer() methods.
parent 8b4fca52
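In short, when the image API is available the decoder output is copied through AMediaCodec_getOutputImage() and the new ms_yuv_buf_copy_with_pix_strides() helper, which honours the row and pixel strides reported by the codec. A minimal sketch of that path, simplified from the dec_process() change below (error handling omitted, variable names as in the diff):

/* Sketch only: assumes the wrappers introduced by this commit are in scope. */
if (AMediaImage_isAvailable()) {  /* API level >= 21 */
    AMediaImage image;
    if (AMediaCodec_getOutputImage(d->codec, oBufidx, &image)) {
        int dst_pix_strides[4] = {1, 1, 1, 1};
        MSRect dst_roi = {0, 0, width, height};
        om = ms_yuv_buf_allocator_get(d->buf_allocator, &pic, width, height);
        /* Copy plane by plane, honouring the codec's row/pixel strides and crop rectangle. */
        ms_yuv_buf_copy_with_pix_strides(image.buffers, image.row_strides, image.pixel_strides, image.crop_rect,
                                         pic.planes, pic.strides, dst_pix_strides, dst_roi);
        AMediaImage_close(&image);
    }
} else {
    /* Legacy path: AMediaCodec_getOutputBuffer() and manual memcpy of the planes. */
}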
......@@ -34,6 +34,10 @@ JavaVM *ms_get_jvm(void);
JNIEnv *ms_get_jni_env(void);
#ifdef ANDROID
int ms_get_android_sdk_version(void);
#endif
#ifdef __cplusplus
}
#endif
......
......@@ -262,6 +262,8 @@ The returned mblk_t points to the external buffer, which is not copied, nor ref'
MS2_PUBLIC mblk_t * ms_yuv_buf_alloc_from_buffer(int w, int h, mblk_t* buffer);
MS2_PUBLIC void ms_yuv_buf_copy(uint8_t *src_planes[], const int src_strides[],
uint8_t *dst_planes[], const int dst_strides[], MSVideoSize roi);
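/* Like ms_yuv_buf_copy(), but takes per-plane pixel strides in addition to row strides,
   and independent source/destination regions of interest, so it can copy from or to
   android.media.Image planes whose layout is not necessarily packed. */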
MS2_PUBLIC void ms_yuv_buf_copy_with_pix_strides(uint8_t *src_planes[], const int src_row_strides[], const int src_pix_strides[], MSRect src_roi,
uint8_t *dst_planes[], const int dst_row_strides[], const int dst_pix_strides[], MSRect dst_roi);
MS2_PUBLIC void ms_yuv_buf_mirror(YuvBuf *buf);
MS2_PUBLIC void ms_yuv_buf_mirrors(YuvBuf *buf,const MSMirrorType type);
MS2_PUBLIC void rgb24_mirror(uint8_t *buf, int w, int h, int linesize);
......
......@@ -565,6 +565,166 @@ void AMediaCodec_setParams(AMediaCodec *codec, const char *params){
env->DeleteLocalRef(mediaCodecClass);
}
static bool _loadClass(JNIEnv *env, const char *className, jclass *_class) {
*_class = env->FindClass(className);
if(handle_java_exception() == -1 || *_class == NULL) {
ms_error("Could not load Java class [%s]", className);
return false;
}
return true;
}
static bool _getMethodID(JNIEnv *env, jclass _class, const char *name, const char *sig, jmethodID *method) {
*method = env->GetMethodID(_class, name, sig);
if(handle_java_exception() == -1 || *method == NULL) {
ms_error("Could not get method %s[%s]", name, sig);
return false;
}
return true;
}
static bool _getFieldID(JNIEnv *env, jclass _class, const char *name, const char *sig, jfieldID *field) {
*field = env->GetFieldID(_class, name, sig);
if(handle_java_exception() == -1 || *field == NULL) {
ms_error("Could not get field %s[%s]", name, sig);
return false;
}
return true;
}
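/* Generic helper behind AMediaCodec_getInputImage()/AMediaCodec_getOutputImage(): calls the
   given MediaCodec method through JNI and fills an AMediaImage with the format, dimensions,
   timestamp, crop rectangle, per-plane strides and direct-buffer pointers of the returned
   android.media.Image. A global reference to the Java object is kept in priv_ptr. */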
static bool _getImage(JNIEnv *env, AMediaCodec *codec, const char *methodName, int index, AMediaImage *image) {
jclass mediaCodecClass = NULL, imageClass = NULL, planeClass = NULL, rectClass = NULL;
jobject jimage = NULL, jrect = NULL;
jobjectArray jplanes = NULL;
jmethodID getOutputImageMethod;
jmethodID getFormatMethod, getWidthMethod, getHeightMethod, getTimestampMethod, getPlanesMethod, getCropRectMethod;
jmethodID getPixelStrideMethod, getRowStrideMethod, getBufferMethod;
jfieldID bottomField, leftField, rightField, topField;
bool success = true;
int bottom, left, right, top;
success = success && _loadClass(env, "android/media/MediaCodec", &mediaCodecClass);
success = success && _loadClass(env, "android/media/Image", &imageClass);
success = success && _loadClass(env, "android/media/Image$Plane", &planeClass);
success = success && _loadClass(env, "android/graphics/Rect", &rectClass);
if(!success) {
ms_error("%s(): one class could not be found", __FUNCTION__);
goto end;
}
success = success && _getMethodID(env, mediaCodecClass, methodName, "(I)Landroid/media/Image;", &getOutputImageMethod);
success = success && _getMethodID(env, imageClass, "getFormat", "()I", &getFormatMethod);
success = success && _getMethodID(env, imageClass, "getWidth", "()I", &getWidthMethod);
success = success && _getMethodID(env, imageClass, "getHeight", "()I", &getHeightMethod);
success = success && _getMethodID(env, imageClass, "getTimestamp", "()J", &getTimestrampMethod);
success = success && _getMethodID(env, imageClass, "getPlanes", "()[Landroid/media/Image$Plane;", &getPlanesMethod);
success = success && _getMethodID(env, imageClass, "getCropRect", "()Landroid/graphics/Rect;", &getCropRectMethod);
success = success && _getMethodID(env, planeClass, "getPixelStride", "()I", &getPixelStrideMethod);
success = success && _getMethodID(env, planeClass, "getRowStride", "()I", &getRowStrideMethod);
success = success && _getMethodID(env, planeClass, "getBuffer", "()Ljava/nio/ByteBuffer;", &getBufferMethod);
success = success && _getFieldID(env, rectClass, "bottom", "I", &bottomField);
success = success && _getFieldID(env, rectClass, "left", "I", &leftField);
success = success && _getFieldID(env, rectClass, "right", "I", &rightField);
success = success && _getFieldID(env, rectClass, "top", "I", &topField);
if(!success) {
ms_error("%s(): one method or field could not be found", __FUNCTION__);
goto end;
}
jimage = env->CallObjectMethod(codec->jcodec, getOutputImageMethod, index);
if(handle_java_exception() == -1 || jimage == NULL) {
ms_error("%s(): could not get the output image with index [%d]", __FUNCTION__, index);
success = false;
goto end;
}
image->format = env->CallIntMethod(jimage, getFormatMethod);
image->width = env->CallIntMethod(jimage, getWidthMethod);
image->height = env->CallIntMethod(jimage, getHeightMethod);
image->timestamp = env->CallLongMethod(jimage, getTimestampMethod);
jrect = env->CallObjectMethod(jimage, getCropRectMethod);
if(jrect == NULL) {
ms_error("%s: could not get the crop rectangle", __FUNCTION__);
success = false;
goto end;
}
bottom = env->GetIntField(jrect, bottomField);
left = env->GetIntField(jrect, leftField);
right = env->GetIntField(jrect, rightField);
top = env->GetIntField(jrect, topField);
image->crop_rect.x = left;
image->crop_rect.y = top;
image->crop_rect.w = right - left;
image->crop_rect.h = bottom - top;
jplanes = reinterpret_cast<jobjectArray>(env->CallObjectMethod(jimage, getPlanesMethod));
image->nplanes = env->GetArrayLength(jplanes);
for(int i=0; i<image->nplanes; i++) {
jobject jplane = env->GetObjectArrayElement(jplanes, i);
image->pixel_strides[i] = env->CallIntMethod(jplane, getPixelStrideMethod);
if(env->ExceptionCheck()) {
image->pixel_strides[i] = -1;
env->ExceptionClear();
}
image->row_strides[i] = env->CallIntMethod(jplane, getRowStrideMethod);
if(env->ExceptionCheck()) {
image->row_strides[i] = -1;
env->ExceptionClear();
}
jobject jbuffer = env->CallObjectMethod(jplane, getBufferMethod);
image->buffers[i] = (uint8_t *)env->GetDirectBufferAddress(jbuffer);
env->DeleteLocalRef(jbuffer);
env->DeleteLocalRef(jplane);
}
image->priv_ptr = env->NewGlobalRef(jimage);
end:
if(mediaCodecClass) env->DeleteLocalRef(mediaCodecClass);
if(imageClass) env->DeleteLocalRef(imageClass);
if(planeClass) env->DeleteLocalRef(planeClass);
if(rectClass) env->DeleteLocalRef(rectClass);
if(jimage) env->DeleteLocalRef(jimage);
if(jplanes) env->DeleteLocalRef(jplanes);
if(jrect) env->DeleteLocalRef(jrect);
return success;
}
bool AMediaCodec_getInputImage(AMediaCodec * codec, int index, AMediaImage *image) {
JNIEnv *env = ms_get_jni_env();
return _getImage(env, codec, "getInputImage", index, image);
}
bool AMediaCodec_getOutputImage(AMediaCodec *codec, int index, AMediaImage *image) {
JNIEnv *env = ms_get_jni_env();
return _getImage(env, codec, "getOutputImage", index, image);
}
void AMediaImage_close(AMediaImage *image) {
jclass imageClass = NULL;
jmethodID close;
bool_t success = TRUE;
JNIEnv *env = ms_get_jni_env();
jobject jimage = (jobject)image->priv_ptr;
success = success && _loadClass(env, "android/media/Image", &imageClass);
success = success && _getMethodID(env, imageClass, "close", "()V", &close);
if(!success) {
ms_error("%s: could not load some class or method ID", __FUNCTION__);
}
if(success) {
env->CallVoidMethod(jimage, close);
}
if(imageClass) env->DeleteLocalRef(imageClass);
env->DeleteGlobalRef(jimage);
image->priv_ptr = NULL;
}
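/* Image-based buffer access (MediaCodec.getInputImage()/getOutputImage()) exists since API level 21 (Android 5.0). */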
bool_t AMediaImage_isAvailable(void) {
return ms_get_android_sdk_version() >= 21;
}
////////////////////////////////////////////////////
// //
......@@ -600,6 +760,7 @@ AMediaFormat *AMediaFormat_new(){
}
format->jformat = env->NewGlobalRef(jformat);
env->DeleteLocalRef(jformat);
env->DeleteLocalRef(mediaFormatClass);
return format;
}
......
......@@ -17,13 +17,31 @@ along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
#include "mediastreamer2/mscommon.h"
#include "mediastreamer2/msvideo.h"
#include <media/NdkMediaCodec.h>
#ifdef __cplusplus
extern "C" {
#endif
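/* C-side view of an android.media.Image: pixel format, geometry, crop rectangle,
   per-plane row and pixel strides, and direct-buffer pointers to the plane data.
   priv_ptr holds a global JNI reference to the Java Image, released by AMediaImage_close(). */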
typedef struct {
int format;
int width;
int height;
MSRect crop_rect;
uint64_t timestamp;
int nplanes;
int row_strides[4];
int pixel_strides[4];
uint8_t *buffers[4];
void *priv_ptr;
} AMediaImage;
void AMediaCodec_setParams(AMediaCodec *codec, const char *params);
bool AMediaCodec_getInputImage(AMediaCodec *codec, int index, AMediaImage *image);
bool AMediaCodec_getOutputImage(AMediaCodec *codec, int index, AMediaImage *image);
void AMediaImage_close(AMediaImage *image);
bool_t AMediaImage_isAvailable(void);
#ifdef __cplusplus
} // extern "C"
......
......@@ -21,6 +21,7 @@ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#include "mediastreamer2/rfc3984.h"
#include "mediastreamer2/msvideo.h"
#include "mediastreamer2/msticker.h"
#include "android_mediacodec.h"
#include <jni.h>
#include <media/NdkMediaCodec.h>
......@@ -69,6 +70,7 @@ static void dec_init(MSFilter *f){
//Size mandatory for decoder configuration
AMediaFormat_setInt32(format,"width",1920);
AMediaFormat_setInt32(format,"height",1080);
if(AMediaImage_isAvailable()) AMediaFormat_setInt32(format, "color-format", 0x7f420888);
AMediaCodec_configure(codec, format, NULL, NULL, 0);
AMediaCodec_start(codec);
......@@ -314,17 +316,29 @@ static void dec_process(MSFilter *f){
if(buf != NULL && d->sps && d->pps){ /*some decoders output garbage while no sps or pps have been received yet !*/
if(width != 0 && height != 0 ){
if(color == 19) {
//YUV
int ysize = width*height;
int usize = ysize/4;
om = ms_yuv_buf_allocator_get(d->buf_allocator,&pic,width,height);
memcpy(pic.planes[0],buf,ysize);
memcpy(pic.planes[1],buf+ysize,usize);
memcpy(pic.planes[2],buf+ysize+usize,usize);
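/* Image-based path: getOutputImage() exposes the real plane layout (row/pixel strides and
   crop rectangle), so the copy no longer has to assume a packed output buffer. */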
if(AMediaImage_isAvailable()) {
AMediaImage image;
int dst_pix_strides[4] = {1, 1, 1, 1};
MSRect dst_roi = {0, 0, width, height}; /* pic is only filled by the allocator below */
if(AMediaCodec_getOutputImage(d->codec, oBufidx, &image)) {
om = ms_yuv_buf_allocator_get(d->buf_allocator, &pic, width, height);
ms_yuv_buf_copy_with_pix_strides(image.buffers, image.row_strides, image.pixel_strides, image.crop_rect,
pic.planes, pic.strides, dst_pix_strides, dst_roi);
AMediaImage_close(&image);
}
} else {
uint8_t* cbcr_src = (uint8_t*) (buf + width * height);
om = copy_ycbcrbiplanar_to_true_yuv_with_rotation_and_down_scale_by_2(d->buf_allocator, buf, cbcr_src, 0, width, height, width, width, TRUE, FALSE);
if(color == 19) {
//YUV
int ysize = width*height;
int usize = ysize/4;
om = ms_yuv_buf_allocator_get(d->buf_allocator,&pic,width,height);
memcpy(pic.planes[0],buf,ysize);
memcpy(pic.planes[1],buf+ysize,usize);
memcpy(pic.planes[2],buf+ysize+usize,usize);
} else {
uint8_t* cbcr_src = (uint8_t*) (buf + width * height);
om = copy_ycbcrbiplanar_to_true_yuv_with_rotation_and_down_scale_by_2(d->buf_allocator, buf, cbcr_src, 0, width, height, width, width, TRUE, FALSE);
}
}
if (!d->first_image_decoded) {
......
......@@ -22,6 +22,7 @@ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#include "mediastreamer2/msvideo.h"
#include "mediastreamer2/msticker.h"
#include "mediastreamer2/mscodecutils.h"
#include "mediastreamer2/msjava.h"
#include "android_mediacodec.h"
#include "h264utils.h"
......@@ -111,29 +112,36 @@ static void enc_preprocess(MSFilter* f) {
AMediaFormat_setInt32(format, "bitrate", d->vconf.required_bitrate);
AMediaFormat_setInt32(format, "frame-rate", d->vconf.fps);
AMediaFormat_setInt32(format, "bitrate-mode",1);
AMediaFormat_setInt32(format, "level", 1); // Ask for baseline AVC profile
if(status != 0){
d->isYUV = FALSE;
AMediaFormat_setInt32(format, "color-format", 21); /*the semi-planar YUV*/
status = AMediaCodec_configure(d->codec, format, NULL, NULL, AMEDIACODEC_CONFIGURE_FLAG_ENCODE);
}
if(status != 0){
d->isYUV = TRUE;
AMediaFormat_setInt32(format, "color-format", 19); /*basic YUV420P*/
status = AMediaCodec_configure(d->codec, format, NULL, NULL, AMEDIACODEC_CONFIGURE_FLAG_ENCODE);
}
if(status != 0){
d->isYUV = TRUE;
AMediaFormat_setInt32(format, "color-format", 0x7f420888); /*the new "flexible YUV", appeared in API23*/
status = AMediaCodec_configure(d->codec, format, NULL, NULL, AMEDIACODEC_CONFIGURE_FLAG_ENCODE);
if(AMediaImage_isAvailable()) {
if(status != 0){
AMediaFormat_setInt32(format, "color-format", 0x7f420888); /*the new "flexible YUV", appeared in API23*/
status = AMediaCodec_configure(d->codec, format, NULL, NULL, AMEDIACODEC_CONFIGURE_FLAG_ENCODE);
}
} else {
if(status != 0){
d->isYUV = FALSE;
AMediaFormat_setInt32(format, "color-format", 21); /*the semi-planar YUV*/
status = AMediaCodec_configure(d->codec, format, NULL, NULL, AMEDIACODEC_CONFIGURE_FLAG_ENCODE);
}
if(status != 0){
d->isYUV = TRUE;
AMediaFormat_setInt32(format, "color-format", 19); /*basic YUV420P*/
status = AMediaCodec_configure(d->codec, format, NULL, NULL, AMEDIACODEC_CONFIGURE_FLAG_ENCODE);
}
}
if (status != 0){
ms_error("MSMediaCodecH264Enc: Could not configure encoder.");
AMediaCodec_delete(d->codec);
d->codec = NULL;
} else {
int32_t color_format;
if(!AMediaFormat_getInt32(format, "color-format", &color_format)) {
color_format = -1;
}
ms_message("MSMediaCodecH264Enc: encoder successfully configured. color-format=%d", color_format);
}
if (d->codec){
......@@ -141,6 +149,8 @@ static void enc_preprocess(MSFilter* f) {
ms_error("MSMediaCodecH264Enc: Could not start encoder.");
AMediaCodec_delete(d->codec);
d->codec = NULL;
} else {
ms_message("MSMediaCodecH264Enc: encoder successfully started");
}
}
AMediaFormat_delete(format);
......@@ -181,7 +191,6 @@ static void enc_process(MSFilter *f){
while((im=ms_queue_get(f->inputs[0]))!=NULL){
if (ms_yuv_buf_init_from_mblk(&pic,im)==0){
AMediaCodecBufferInfo info;
uint8_t *buf=NULL;
size_t bufsize;
ssize_t ibufidx, obufidx;
bool have_seen_sps_pps;
......@@ -194,26 +203,42 @@ static void enc_process(MSFilter *f){
ibufidx = AMediaCodec_dequeueInputBuffer(d->codec, TIMEOUT_US);
if (ibufidx >= 0) {
buf = AMediaCodec_getInputBuffer(d->codec, ibufidx, &bufsize);
if(buf != NULL){
if(d->isYUV){
int ysize = pic.w * pic.h;
int usize = ysize / 4;
memcpy(buf, pic.planes[0], ysize);
memcpy(buf + ysize, pic.planes[1], usize);
memcpy(buf + ysize+usize, pic.planes[2], usize);
} else {
int i;
size_t size=(size_t) pic.w * pic.h;
uint8_t *dst = pic.planes[0];
memcpy(buf,dst,size);
for (i = 0; i < pic.w/2*pic.h/2; i++){
buf[size+2*i]=pic.planes[1][i];
buf[size+2*i+1]=pic.planes[2][i];
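/* Image-based path: the destination strides and crop rectangle come from the codec's own
   input image, so no assumption is made about the layout the encoder expects. */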
if(AMediaImage_isAvailable()) {
AMediaImage image;
if(AMediaCodec_getInputImage(d->codec, ibufidx, &image)) {
if(image.format == 35 /* YUV_420_888 */) {
MSRect src_roi = {0, 0, pic.w, pic.h};
int src_pix_strides[4] = {1, 1, 1, 1};
ms_yuv_buf_copy_with_pix_strides(pic.planes, pic.strides, src_pix_strides, src_roi, image.buffers, image.row_strides, image.pixel_strides, image.crop_rect);
AMediaImage_close(&image);
AMediaCodec_queueInputBuffer(d->codec, ibufidx, 0, (size_t)image.width*image.height*3/2, f->ticker->time*1000, 0);
} else {
ms_error("%s: encoder require non YUV420 format", f->desc->name);
AMediaImage_close(&image);
}
}
} else {
uint8_t *buf = AMediaCodec_getInputBuffer(d->codec, ibufidx, &bufsize);
if(buf != NULL){
if(d->isYUV){
int ysize = pic.w * pic.h;
int usize = ysize / 4;
memcpy(buf, pic.planes[0], ysize);
memcpy(buf + ysize, pic.planes[1], usize);
memcpy(buf + ysize+usize, pic.planes[2], usize);
} else {
int i;
size_t size=(size_t) pic.w * pic.h;
uint8_t *dst = pic.planes[0];
memcpy(buf,dst,size);
for (i = 0; i < pic.w/2*pic.h/2; i++){
buf[size+2*i]=pic.planes[1][i];
buf[size+2*i+1]=pic.planes[2][i];
}
}
AMediaCodec_queueInputBuffer(d->codec, ibufidx, 0, (size_t)(pic.w * pic.h)*3/2, f->ticker->time*1000,0);
}
AMediaCodec_queueInputBuffer(d->codec, ibufidx, 0, (size_t)(pic.w * pic.h)*3/2, f->ticker->time*1000,0);
}
}else if (ibufidx == AMEDIA_ERROR_UNKNOWN){
ms_error("MSMediaCodecH264Enc: AMediaCodec_dequeueInputBuffer() had an exception");
......@@ -221,7 +246,7 @@ static void enc_process(MSFilter *f){
have_seen_sps_pps = FALSE; /*this checks whether at a single timestamp point we dequeued SPS PPS before IDR*/
while((obufidx = AMediaCodec_dequeueOutputBuffer(d->codec, &info, TIMEOUT_US)) >= 0) {
buf = AMediaCodec_getOutputBuffer(d->codec, obufidx, &bufsize);
uint8_t *buf = AMediaCodec_getOutputBuffer(d->codec, obufidx, &bufsize);
if (buf){
mblk_t *m;
ms_h264_bitstream_to_nalus(buf, info.size, &nalus);
......
......@@ -78,6 +78,20 @@ JNIEnv *ms_get_jni_env(void){
#ifdef ANDROID
int ms_get_android_sdk_version(void) {
static int sdk_version = 0;
if (sdk_version==0){
/* Get Android SDK version. */
JNIEnv *env = ms_get_jni_env();
jclass version_class = (*env)->FindClass(env, "android/os/Build$VERSION");
jfieldID fid = (*env)->GetStaticFieldID(env, version_class, "SDK_INT", "I");
sdk_version = (*env)->GetStaticIntField(env, version_class, fid);
ms_message("SDK version [%i] detected", sdk_version);
(*env)->DeleteLocalRef(env, version_class);
}
return sdk_version;
}
JNIEXPORT void JNICALL Java_org_linphone_mediastream_Log_d(JNIEnv* env, jobject thiz, jstring jmsg) {
const char* msg = jmsg ? (*env)->GetStringUTFChars(env, jmsg, NULL) : NULL;
ms_debug("%s", msg);
......@@ -102,4 +116,4 @@ JNIEXPORT void JNICALL Java_org_linphone_mediastream_Log_e(JNIEnv* env, jobject
if (msg) (*env)->ReleaseStringUTFChars(env, jmsg, msg);
}
#endif
\ No newline at end of file
#endif
......@@ -156,27 +156,61 @@ mblk_t* ms_yuv_buf_alloc_from_buffer(int w, int h, mblk_t* buffer) {
return msg;
}
static void plane_copy(const uint8_t *src_plane, int src_stride,
uint8_t *dst_plane, int dst_stride, MSVideoSize roi){
int i;
if ((roi.width == src_stride) && (roi.width == dst_stride)) {
memcpy(dst_plane, src_plane, roi.width * roi.height);
return;
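/* Copies one row of 'width' pixels between planes, honouring a pixel stride on each side;
   falls back to a plain memcpy when both pixel strides are 1. */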
static void row_copy(const uint8_t *src, uint8_t *dst, size_t width, size_t src_pix_stride, size_t dst_pix_stride) {
if(src_pix_stride == 1 && dst_pix_stride == 1) {
memcpy(dst, src, width);
} else {
const uint8_t *r_ptr = src;
uint8_t *w_ptr = dst;
const uint8_t *src_end = src + width * src_pix_stride;
const uint8_t *dst_end = dst + width * dst_pix_stride;
while(r_ptr < src_end && w_ptr < dst_end) {
*w_ptr = *r_ptr;
r_ptr += src_pix_stride;
w_ptr += dst_pix_stride;
}
}
for(i=0;i<roi.height;++i){
memcpy(dst_plane,src_plane,roi.width);
src_plane+=src_stride;
dst_plane+=dst_stride;
}
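/* Copies the source region of interest of one plane into the destination region of interest
   of another, with independent row and pixel strides on each side. */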
static void plane_copy(const uint8_t *src_plane, size_t src_row_stride, size_t src_pix_stride, const MSRect *src_roi,
uint8_t *dst_plane, size_t dst_row_stride, size_t dst_pix_stride, const MSRect *dst_roi) {
const uint8_t *r_ptr = src_plane + (src_roi->y * src_row_stride + src_roi->x * src_pix_stride);
uint8_t *w_ptr = dst_plane + (dst_roi->y * dst_row_stride + dst_roi->x * dst_pix_stride);
int i;
for(i=0; i<src_roi->h; i++) {
row_copy(r_ptr, w_ptr, src_roi->w, src_pix_stride, dst_pix_stride);
r_ptr += src_row_stride;
w_ptr += dst_row_stride;
}
}
void ms_yuv_buf_copy(uint8_t *src_planes[], const int src_strides[],
uint8_t *dst_planes[], const int dst_strides[], MSVideoSize roi){
plane_copy(src_planes[0],src_strides[0],dst_planes[0],dst_strides[0],roi);
roi.width=roi.width/2;
roi.height=roi.height/2;
plane_copy(src_planes[1],src_strides[1],dst_planes[1],dst_strides[1],roi);
plane_copy(src_planes[2],src_strides[2],dst_planes[2],dst_strides[2],roi);
uint8_t *dst_planes[], const int dst_strides[], MSVideoSize roi) {
MSRect roi_rect = {0, 0, roi.width, roi.height};
plane_copy(src_planes[0], src_strides[0], 1, &roi_rect, dst_planes[0], dst_strides[0], 1, &roi_rect);
roi_rect.w /= 2;
roi_rect.h /= 2;
plane_copy(src_planes[1], src_strides[1], 1, &roi_rect, dst_planes[1], dst_strides[1], 1, &roi_rect);
plane_copy(src_planes[2], src_strides[2], 1, &roi_rect, dst_planes[2], dst_strides[2], 1, &roi_rect);
}
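/* Copies the three planes of a YUV buffer with arbitrary row/pixel strides; the chroma
   regions of interest are halved, assuming 4:2:0 subsampling. */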
void ms_yuv_buf_copy_with_pix_strides(uint8_t *src_planes[], const int src_row_strides[], const int src_pix_strides[], MSRect src_roi,
uint8_t *dst_planes[], const int dst_row_strides[], const int dst_pix_strides[], MSRect dst_roi) {
plane_copy(src_planes[0], src_row_strides[0], src_pix_strides[0], &src_roi, dst_planes[0], dst_row_strides[0], dst_pix_strides[0], &dst_roi);
src_roi.x /= 2;
src_roi.y /= 2;
src_roi.w /= 2;
src_roi.h /= 2;
dst_roi.x /= 2;
dst_roi.y /= 2;
dst_roi.w /= 2;
dst_roi.h /= 2;
plane_copy(src_planes[1], src_row_strides[1], src_pix_strides[1], &src_roi, dst_planes[1], dst_row_strides[1], dst_pix_strides[1], &dst_roi);
plane_copy(src_planes[2], src_row_strides[2], src_pix_strides[2], &src_roi, dst_planes[2], dst_row_strides[2], dst_pix_strides[2], &dst_roi);
}
MSYuvBufAllocator *ms_yuv_buf_allocator_new(void) {
......