/*
 *  Copyright (c) 2010 The WebM project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include <limits.h>
#include <math.h>
#include <stdio.h>

#include "./vp9_rtcd.h"
#include "./vpx_config.h"

#include "vpx_ports/vpx_timer.h"

#include "vp9/common/vp9_common.h"
#include "vp9/common/vp9_entropy.h"
#include "vp9/common/vp9_entropymode.h"
#include "vp9/common/vp9_idct.h"
#include "vp9/common/vp9_mvref_common.h"
#include "vp9/common/vp9_pred_common.h"
#include "vp9/common/vp9_quant_common.h"
#include "vp9/common/vp9_reconintra.h"
#include "vp9/common/vp9_reconinter.h"
#include "vp9/common/vp9_seg_common.h"
#include "vp9/common/vp9_systemdependent.h"
#include "vp9/common/vp9_tile_common.h"
#include "vp9/encoder/vp9_aq_complexity.h"
#include "vp9/encoder/vp9_aq_cyclicrefresh.h"
#include "vp9/encoder/vp9_aq_variance.h"
#include "vp9/encoder/vp9_encodeframe.h"
#include "vp9/encoder/vp9_encodemb.h"
#include "vp9/encoder/vp9_encodemv.h"
#include "vp9/encoder/vp9_extend.h"
#include "vp9/encoder/vp9_pickmode.h"
#include "vp9/encoder/vp9_rdopt.h"
#include "vp9/encoder/vp9_segmentation.h"
#include "vp9/encoder/vp9_tokenize.h"

#define GF_ZEROMV_ZBIN_BOOST 0
#define LF_ZEROMV_ZBIN_BOOST 0
#define MV_ZBIN_BOOST        0
#define SPLIT_MV_ZBIN_BOOST  0
#define INTRA_ZBIN_BOOST     0

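// Returns a pointer to the index in MACROBLOCK that tracks which sub-block
// of the given size is currently being processed by the recursive
// partition search.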
static INLINE uint8_t *get_sb_index(MACROBLOCK *x, BLOCK_SIZE subsize) {
  switch (subsize) {
    case BLOCK_64X64:
    case BLOCK_64X32:
    case BLOCK_32X64:
    case BLOCK_32X32:
      return &x->sb_index;
    case BLOCK_32X16:
    case BLOCK_16X32:
    case BLOCK_16X16:
      return &x->mb_index;
    case BLOCK_16X8:
    case BLOCK_8X16:
    case BLOCK_8X8:
      return &x->b_index;
    case BLOCK_8X4:
    case BLOCK_4X8:
    case BLOCK_4X4:
      return &x->ab_index;
    default:
      assert(0);
      return NULL;
  }
}

static void encode_superblock(VP9_COMP *cpi, TOKENEXTRA **t, int output_enabled,
                              int mi_row, int mi_col, BLOCK_SIZE bsize);

static void adjust_act_zbin(VP9_COMP *cpi, MACROBLOCK *x);

// activity_avg must be positive, or flat regions could get a zero weight
//  (infinite lambda), which confounds analysis.
// This also avoids the need for divide by zero checks in
//  vp9_activity_masking().
#define ACTIVITY_AVG_MIN 64

// Motion vector component magnitude threshold for defining fast motion.
#define FAST_MOTION_MV_THRESH 24

// This is used as a reference when computing the source variance for the
//  purposes of activity masking.
// Eventually this should be replaced by custom no-reference routines,
//  which will be faster.
static const uint8_t VP9_VAR_OFFS[64] = {
  128, 128, 128, 128, 128, 128, 128, 128,
  128, 128, 128, 128, 128, 128, 128, 128,
  128, 128, 128, 128, 128, 128, 128, 128,
  128, 128, 128, 128, 128, 128, 128, 128,
  128, 128, 128, 128, 128, 128, 128, 128,
  128, 128, 128, 128, 128, 128, 128, 128,
  128, 128, 128, 128, 128, 128, 128, 128,
  128, 128, 128, 128, 128, 128, 128, 128
};

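// Per-pixel variance of the source block, measured against the flat
// 128-valued reference above (equivalent to the block's own variance).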
static unsigned int get_sby_perpixel_variance(VP9_COMP *cpi,
                                              MACROBLOCK *x,
                                              BLOCK_SIZE bs) {
  unsigned int var, sse;
  var = cpi->fn_ptr[bs].vf(x->plane[0].src.buf, x->plane[0].src.stride,
                           VP9_VAR_OFFS, 0, &sse);
  return ROUND_POWER_OF_TWO(var, num_pels_log2_lookup[bs]);
}

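// Per-pixel variance of the difference between the source block and the
// co-located block in the last-frame reference buffer.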
static unsigned int get_sby_perpixel_diff_variance(VP9_COMP *cpi,
                                                   MACROBLOCK *x,
                                                   int mi_row,
                                                   int mi_col,
                                                   BLOCK_SIZE bs) {
  const YV12_BUFFER_CONFIG *yv12 = get_ref_frame_buffer(cpi, LAST_FRAME);
  int offset = (mi_row * MI_SIZE) * yv12->y_stride + (mi_col * MI_SIZE);
  unsigned int var, sse;
  var = cpi->fn_ptr[bs].vf(x->plane[0].src.buf,
                           x->plane[0].src.stride,
                           yv12->y_buffer + offset,
                           yv12->y_stride,
                           &sse);
  return ROUND_POWER_OF_TWO(var, num_pels_log2_lookup[bs]);
}

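// Picks a fixed partition size for the RD path by thresholding the per-pixel
// difference variance of the 64x64 block against the last frame: low variance
// favors large partitions, high variance small ones.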
static BLOCK_SIZE get_rd_var_based_fixed_partition(VP9_COMP *cpi,
                                                   int mi_row,
                                                   int mi_col) {
  unsigned int var = get_sby_perpixel_diff_variance(cpi, &cpi->mb,
                                                    mi_row, mi_col,
                                                    BLOCK_64X64);
  if (var < 8)
    return BLOCK_64X64;
  else if (var < 128)
    return BLOCK_32X32;
  else if (var < 2048)
    return BLOCK_16X16;
  else
    return BLOCK_8X8;
}

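// Same idea for the non-RD (real-time) path, with tighter thresholds and a
// 16x16 lower bound.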
static BLOCK_SIZE get_nonrd_var_based_fixed_partition(VP9_COMP *cpi,
                                                      int mi_row,
                                                      int mi_col) {
  unsigned int var = get_sby_perpixel_diff_variance(cpi, &cpi->mb,
                                                    mi_row, mi_col,
                                                    BLOCK_64X64);
  if (var < 4)
    return BLOCK_64X64;
  else if (var < 10)
    return BLOCK_32X32;
  else
    return BLOCK_16X16;
}

// Lighter version of set_offsets that only sets the mode info
// pointers.
static INLINE void set_modeinfo_offsets(VP9_COMMON *const cm,
                                        MACROBLOCKD *const xd,
                                        int mi_row,
                                        int mi_col) {
  const int idx_str = xd->mi_stride * mi_row + mi_col;
  xd->mi = cm->mi_grid_visible + idx_str;
  xd->mi[0] = cm->mi + idx_str;
}

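// Checks the macroblock-resolution active map: returns 1 if any macroblock
// covered by the given block is marked active.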
static int is_block_in_mb_map(const VP9_COMP *cpi, int mi_row, int mi_col,
                              BLOCK_SIZE bsize) {
  const VP9_COMMON *const cm = &cpi->common;
  const int mb_rows = cm->mb_rows;
  const int mb_cols = cm->mb_cols;
  const int mb_row = mi_row >> 1;
  const int mb_col = mi_col >> 1;
  const int mb_width = num_8x8_blocks_wide_lookup[bsize] >> 1;
  const int mb_height = num_8x8_blocks_high_lookup[bsize] >> 1;
  int r, c;
  if (bsize <= BLOCK_16X16) {
    return cpi->active_map[mb_row * mb_cols + mb_col];
  }
  for (r = 0; r < mb_height; ++r) {
    for (c = 0; c < mb_width; ++c) {
      int row = mb_row + r;
      int col = mb_col + c;
      if (row >= mb_rows || col >= mb_cols)
        continue;
      if (cpi->active_map[row * mb_cols + col])
        return 1;
    }
  }
  return 0;
}

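// Returns 1 if the block should be considered for encoding; with the active
// map enabled (and outside lossless mode) this requires at least one covered
// macroblock to be active.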
static int check_active_map(const VP9_COMP *cpi, const MACROBLOCK *x,
                            int mi_row, int mi_col,
                            BLOCK_SIZE bsize) {
  if (cpi->active_map_enabled && !x->e_mbd.lossless) {
    return is_block_in_mb_map(cpi, mi_row, mi_col, bsize);
  } else {
    return 1;
  }
}