diff --git a/vp9/encoder/vp9_encoder.c b/vp9/encoder/vp9_encoder.c
index 5d1dd4d8ee2386bccceb167384a7a34b97ff3a8f..da57370b005cadc3edf12aa21da3fa0aadf1cf15 100644
--- a/vp9/encoder/vp9_encoder.c
+++ b/vp9/encoder/vp9_encoder.c
@@ -1676,6 +1676,10 @@ VP9_COMP *vp9_create_compressor(VP9EncoderConfig *oxcf) {
       vp9_sub_pixel_avg_variance4x4, vp9_sad4x4x3, vp9_sad4x4x8,
       vp9_sad4x4x4d)
 
+#if CONFIG_VP9_HIGHBITDEPTH
+  highbd_set_var_fns(cpi);
+#endif
+
   /* vp9_init_quantizer() is first called here. Add check in
    * vp9_frame_init_quantizer() so that vp9_init_quantizer is only
    * called later when needed. This will avoid unnecessary calls of
diff --git a/vp9/vp9_cx_iface.c b/vp9/vp9_cx_iface.c
index fbf4aa2fccfa61295e462f8e3435259382472e84..473166cb7e67c0bcfa5cefac4a74ca31ad222e14 100644
--- a/vp9/vp9_cx_iface.c
+++ b/vp9/vp9_cx_iface.c
@@ -274,27 +274,47 @@ static vpx_codec_err_t validate_config(vpx_codec_alg_priv_t *ctx,
   }
 
 #if !CONFIG_VP9_HIGHBITDEPTH
-  if (cfg->g_profile > (unsigned int)PROFILE_1)
+  if (cfg->g_profile > (unsigned int)PROFILE_1) {
     ERROR("Profile > 1 not supported in this build configuration");
+  }
 #endif
 
   if (cfg->g_profile <= (unsigned int)PROFILE_1 &&
-      extra_cfg->bit_depth > VPX_BITS_8)
+      cfg->g_bit_depth > VPX_BITS_8) {
     ERROR("Codec high bit-depth not supported in profile < 2");
+  }
+  if (cfg->g_profile <= (unsigned int)PROFILE_1 &&
+      cfg->g_input_bit_depth > 8) {
+    ERROR("Source high bit-depth not supported in profile < 2");
+  }
   if (cfg->g_profile > (unsigned int)PROFILE_1 &&
-      extra_cfg->bit_depth == VPX_BITS_8)
+      cfg->g_bit_depth == VPX_BITS_8) {
     ERROR("Codec bit-depth 8 not supported in profile > 1");
+  }
 
   return VPX_CODEC_OK;
 }
-
 static vpx_codec_err_t validate_img(vpx_codec_alg_priv_t *ctx,
                                     const vpx_image_t *img) {
   switch (img->fmt) {
     case VPX_IMG_FMT_YV12:
     case VPX_IMG_FMT_I420:
+    case VPX_IMG_FMT_I42016:
+      break;
     case VPX_IMG_FMT_I422:
     case VPX_IMG_FMT_I444:
+      if (ctx->cfg.g_profile != (unsigned int)PROFILE_1) {
+        ERROR("Invalid image format. I422, I444 images are "
+              "not supported in profile.");
+      }
+      break;
+    case VPX_IMG_FMT_I42216:
+    case VPX_IMG_FMT_I44416:
+      if (ctx->cfg.g_profile != (unsigned int)PROFILE_1 &&
+          ctx->cfg.g_profile != (unsigned int)PROFILE_3) {
+        ERROR("Invalid image format. 16-bit I422, I444 images are "
+              "not supported in profile.");
+      }
       break;
     default:
       ERROR("Invalid image format. Only YV12, I420, I422, I444 images are "
@@ -330,7 +350,7 @@ static vpx_codec_err_t set_encoder_config(
   oxcf->profile = cfg->g_profile;
   oxcf->width   = cfg->g_w;
   oxcf->height  = cfg->g_h;
-  oxcf->bit_depth = extra_cfg->bit_depth;
+  oxcf->bit_depth = cfg->g_bit_depth;
   oxcf->input_bit_depth = cfg->g_input_bit_depth;
   // guess a frame rate if out of whack, use 30
   oxcf->init_framerate = (double)cfg->g_timebase.den / cfg->g_timebase.num;
diff --git a/vpx/src/vpx_image.c b/vpx/src/vpx_image.c
index e58b61ea390d7878240d2aab99adc9d45dc51b13..6ae461d15f824f2507c7fd8afc6a84fd6ed9aa22 100644
--- a/vpx/src/vpx_image.c
+++ b/vpx/src/vpx_image.c
@@ -110,6 +110,7 @@ static vpx_image_t *img_alloc_helper(vpx_image_t *img,
     case VPX_IMG_FMT_YV12:
     case VPX_IMG_FMT_VPXI420:
     case VPX_IMG_FMT_VPXYV12:
+    case VPX_IMG_FMT_I42016:
       ycs = 1;
       break;
     default:
@@ -209,39 +210,40 @@ int vpx_img_set_rect(vpx_image_t *img,
       img->planes[VPX_PLANE_PACKED] =
           img->img_data + x * img->bps / 8 + y * img->stride[VPX_PLANE_PACKED];
     } else {
+      const int bytes_per_sample =
+          (img->fmt & VPX_IMG_FMT_HIGHBITDEPTH) ? 2 : 1;
       data = img->img_data;
 
       if (img->fmt & VPX_IMG_FMT_HAS_ALPHA) {
         img->planes[VPX_PLANE_ALPHA] =
-            data + x + y * img->stride[VPX_PLANE_ALPHA];
+            data + x * bytes_per_sample + y * img->stride[VPX_PLANE_ALPHA];
         data += img->h * img->stride[VPX_PLANE_ALPHA];
       }
 
-      img->planes[VPX_PLANE_Y] = data + x + y * img->stride[VPX_PLANE_Y];
+      img->planes[VPX_PLANE_Y] = data + x * bytes_per_sample +
+                                 y * img->stride[VPX_PLANE_Y];
       data += img->h * img->stride[VPX_PLANE_Y];
 
       if (!(img->fmt & VPX_IMG_FMT_UV_FLIP)) {
-        img->planes[VPX_PLANE_U] = data
-                                   + (x >> img->x_chroma_shift)
-                                   + (y >> img->y_chroma_shift) * img->stride[VPX_PLANE_U];
+        img->planes[VPX_PLANE_U] =
+            data + (x >> img->x_chroma_shift) * bytes_per_sample +
+            (y >> img->y_chroma_shift) * img->stride[VPX_PLANE_U];
         data += (img->h >> img->y_chroma_shift) * img->stride[VPX_PLANE_U];
-        img->planes[VPX_PLANE_V] = data
-                                   + (x >> img->x_chroma_shift)
-                                   + (y >> img->y_chroma_shift) * img->stride[VPX_PLANE_V];
+        img->planes[VPX_PLANE_V] =
+            data + (x >> img->x_chroma_shift) * bytes_per_sample +
+            (y >> img->y_chroma_shift) * img->stride[VPX_PLANE_V];
       } else {
-        img->planes[VPX_PLANE_V] = data
-                                   + (x >> img->x_chroma_shift)
-                                   + (y >> img->y_chroma_shift) * img->stride[VPX_PLANE_V];
+        img->planes[VPX_PLANE_V] =
+            data + (x >> img->x_chroma_shift) * bytes_per_sample +
+            (y >> img->y_chroma_shift) * img->stride[VPX_PLANE_V];
         data += (img->h >> img->y_chroma_shift) * img->stride[VPX_PLANE_V];
-        img->planes[VPX_PLANE_U] = data
-                                   + (x >> img->x_chroma_shift)
-                                   + (y >> img->y_chroma_shift) * img->stride[VPX_PLANE_U];
+        img->planes[VPX_PLANE_U] =
+            data + (x >> img->x_chroma_shift) * bytes_per_sample +
+            (y >> img->y_chroma_shift) * img->stride[VPX_PLANE_U];
       }
     }
-
     return 0;
   }
-
   return -1;
 }
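Note on the vpx_img_set_rect change above: plane strides are stored in bytes, so when a high bit-depth format packs each sample into two bytes only the horizontal pixel offset needs the bytes_per_sample scaling; the vertical term already picks it up through the stride. The standalone sketch below is not part of the patch and uses invented stride values purely for illustration; it isolates the same offset arithmetic.

/*
 * Minimal sketch of the plane-offset math used in vpx_img_set_rect.
 * Strides are in bytes; the x offset is scaled by the sample size.
 */
#include <stdio.h>

static size_t plane_offset(unsigned int x, unsigned int y,
                           unsigned int stride_bytes,
                           unsigned int bytes_per_sample) {
  /* Mirrors data + x * bytes_per_sample + y * stride in the patch. */
  return (size_t)x * bytes_per_sample + (size_t)y * stride_bytes;
}

int main(void) {
  const unsigned int x = 16, y = 8;
  /* 8-bit I420 luma plane: 1 byte per sample, hypothetical stride 640. */
  printf("8-bit  Y offset: %zu bytes\n", plane_offset(x, y, 640, 1));
  /* 16-bit storage (e.g. I42016): 2 bytes per sample, stride doubles. */
  printf("16-bit Y offset: %zu bytes\n", plane_offset(x, y, 1280, 2));
  return 0;
}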