/*
 *  Copyright (c) 2013 The WebM project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */
#ifndef VP9_VP9_IFACE_COMMON_H_
#define VP9_VP9_IFACE_COMMON_H_

#include <assert.h>

#include "vpx_ports/mem.h"

static void yuvconfig2image(vpx_image_t *img, const YV12_BUFFER_CONFIG *yv12,
                            void *user_priv) {
  /* vpx_img_wrap() doesn't allow specifying independent strides for
   * the Y, U, and V planes, nor other alignment adjustments that
   * might be representable by a YV12_BUFFER_CONFIG, so we just
   * initialize all the fields. */
  int bps;
  if (!yv12->subsampling_y) {
    if (!yv12->subsampling_x) {
      img->fmt = VPX_IMG_FMT_I444;
      bps = 24;
    } else {
      img->fmt = VPX_IMG_FMT_I422;
      bps = 16;
    }
  } else {
    if (!yv12->subsampling_x) {
      img->fmt = VPX_IMG_FMT_I440;
      bps = 16;
    } else {
      img->fmt = VPX_IMG_FMT_I420;
      bps = 12;
    }
  }
  img->cs = yv12->color_space;
  img->range = yv12->color_range;
  img->bit_depth = 8;
  img->w = yv12->y_stride;
  img->h = ALIGN_POWER_OF_TWO(yv12->y_height + 2 * VP9_ENC_BORDER_IN_PIXELS, 3);
  img->d_w = yv12->y_crop_width;
  img->d_h = yv12->y_crop_height;
  img->r_w = yv12->render_width;
  img->r_h = yv12->render_height;
  img->x_chroma_shift = yv12->subsampling_x;
  img->y_chroma_shift = yv12->subsampling_y;
  img->planes[VPX_PLANE_Y] = yv12->y_buffer;
  img->planes[VPX_PLANE_U] = yv12->u_buffer;
  img->planes[VPX_PLANE_V] = yv12->v_buffer;
  img->planes[VPX_PLANE_ALPHA] = NULL;
  img->stride[VPX_PLANE_Y] = yv12->y_stride;
  img->stride[VPX_PLANE_U] = yv12->uv_stride;
  img->stride[VPX_PLANE_V] = yv12->uv_stride;
  img->stride[VPX_PLANE_ALPHA] = yv12->y_stride;
#if CONFIG_VP9_HIGHBITDEPTH
  if (yv12->flags & YV12_FLAG_HIGHBITDEPTH) {
    // vpx_image_t uses byte strides and a pointer to the first byte
    // of the image.
    img->fmt = (vpx_img_fmt_t)(img->fmt | VPX_IMG_FMT_HIGHBITDEPTH);
    img->bit_depth = yv12->bit_depth;
    img->planes[VPX_PLANE_Y] = (uint8_t *)CONVERT_TO_SHORTPTR(yv12->y_buffer);
    img->planes[VPX_PLANE_U] = (uint8_t *)CONVERT_TO_SHORTPTR(yv12->u_buffer);
    img->planes[VPX_PLANE_V] = (uint8_t *)CONVERT_TO_SHORTPTR(yv12->v_buffer);
    img->planes[VPX_PLANE_ALPHA] = NULL;
    img->stride[VPX_PLANE_Y] = 2 * yv12->y_stride;
    img->stride[VPX_PLANE_U] = 2 * yv12->uv_stride;
    img->stride[VPX_PLANE_V] = 2 * yv12->uv_stride;
    img->stride[VPX_PLANE_ALPHA] = 2 * yv12->y_stride;
  }
#endif  // CONFIG_VP9_HIGHBITDEPTH
  img->bps = bps;
  img->user_priv = user_priv;
  img->img_data = yv12->buffer_alloc;
  img->img_data_owner = 0;
  img->self_allocd = 0;
}
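
/* Usage sketch: a decoder-side caller can use yuvconfig2image() to expose an
 * internal frame buffer to the application without copying pixels. The helper
 * name wrap_show_frame below is illustrative only, not a libvpx symbol.
 *
 *   static void wrap_show_frame(vpx_image_t *img,
 *                               const YV12_BUFFER_CONFIG *show_frame,
 *                               void *user_priv) {
 *     yuvconfig2image(img, show_frame, user_priv);
 *     // img now aliases show_frame's planes and owns no memory
 *     // (img_data_owner == 0), so it must not outlive the buffer.
 *   }
 */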

static vpx_codec_err_t image2yuvconfig(const vpx_image_t *img,
                                       YV12_BUFFER_CONFIG *yv12) {
  yv12->y_buffer = img->planes[VPX_PLANE_Y];
  yv12->u_buffer = img->planes[VPX_PLANE_U];
  yv12->v_buffer = img->planes[VPX_PLANE_V];

  yv12->y_crop_width = img->d_w;
  yv12->y_crop_height = img->d_h;
  yv12->render_width = img->r_w;
  yv12->render_height = img->r_h;
  yv12->y_width = img->d_w;
  yv12->y_height = img->d_h;

  yv12->uv_width =
      img->x_chroma_shift == 1 ? (1 + yv12->y_width) / 2 : yv12->y_width;
  yv12->uv_height =
      img->y_chroma_shift == 1 ? (1 + yv12->y_height) / 2 : yv12->y_height;
  yv12->uv_crop_width = yv12->uv_width;
  yv12->uv_crop_height = yv12->uv_height;

  yv12->y_stride = img->stride[VPX_PLANE_Y];
  yv12->uv_stride = img->stride[VPX_PLANE_U];
  yv12->color_space = img->cs;
  yv12->color_range = img->range;

#if CONFIG_VP9_HIGHBITDEPTH
  if (img->fmt & VPX_IMG_FMT_HIGHBITDEPTH) {
    // In vpx_image_t
    //     planes point to uint8 address of start of data
    //     stride counts uint8s to reach next row
    // In YV12_BUFFER_CONFIG
    //     y_buffer, u_buffer, v_buffer point to uint16 address of data
    //     stride and border counts in uint16s
    // This means that all the address calculations in the main body of code
    // should work correctly.
    // However, before we do any pixel operations we need to cast the address
    // to a uint16 pointer and double its value.
    yv12->y_buffer = CONVERT_TO_BYTEPTR(yv12->y_buffer);
    yv12->u_buffer = CONVERT_TO_BYTEPTR(yv12->u_buffer);
    yv12->v_buffer = CONVERT_TO_BYTEPTR(yv12->v_buffer);
    yv12->y_stride >>= 1;
    yv12->uv_stride >>= 1;
    yv12->flags = YV12_FLAG_HIGHBITDEPTH;
  } else {
    yv12->flags = 0;
  }
  yv12->border = (yv12->y_stride - img->w) / 2;
#else
  yv12->border = (img->stride[VPX_PLANE_Y] - img->w) / 2;
#endif  // CONFIG_VP9_HIGHBITDEPTH
  yv12->subsampling_x = img->x_chroma_shift;
  yv12->subsampling_y = img->y_chroma_shift;
  return VPX_CODEC_OK;
}
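
/* Usage sketch: image2yuvconfig() is the inverse mapping, wrapping a
 * caller-supplied vpx_image_t (e.g. the img member of a vpx_ref_frame_t
 * handed in through a VP8_SET_REFERENCE control) in the YV12_BUFFER_CONFIG
 * layout used internally. The helper name set_reference_sketch is
 * illustrative only; note that the border computed above assumes the
 * allocation has equal left and right borders.
 *
 *   static vpx_codec_err_t set_reference_sketch(const vpx_ref_frame_t *ref) {
 *     YV12_BUFFER_CONFIG sd;
 *     const vpx_codec_err_t res = image2yuvconfig(&ref->img, &sd);
 *     // sd now aliases ref->img's planes; for high bit depth buffers the
 *     // strides are halved so they count uint16 samples, not bytes.
 *     return res;
 *   }
 */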

static VP9_REFFRAME ref_frame_to_vp9_reframe(vpx_ref_frame_type_t frame) {
  switch (frame) {
    case VP8_LAST_FRAME: return VP9_LAST_FLAG;
    case VP8_GOLD_FRAME: return VP9_GOLD_FLAG;
    case VP8_ALTR_FRAME: return VP9_ALT_FLAG;
  }
  assert(0 && "Invalid Reference Frame");
  return VP9_LAST_FLAG;
}
#endif  // VP9_VP9_IFACE_COMMON_H_