/*
 * Copyright (c) 2016, Alliance for Open Media. All rights reserved.
 *
 * This source code is subject to the terms of the BSD 2 Clause License and
 * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
 * was not distributed with this source code in the LICENSE file, you can
 * obtain it at www.aomedia.org/license/software. If the Alliance for Open
 * Media Patent License 1.0 was not distributed with this source code in the
 * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
 */

#ifndef AOM_AV1_COMMON_MVREF_COMMON_H_
#define AOM_AV1_COMMON_MVREF_COMMON_H_
// Set the upper limit of the motion vector component magnitude.
// This would make a motion vector fit in 26 bits. Plus 3 bits for the
// reference frame index. A tuple of motion vector can hence be stored within
// 32 bit range for efficient load/store operations.
#define REFMVS_LIMIT ((1 << 12) - 1)
// A candidate position relative to the current block, expressed in
// mi (mode-info) units.
typedef struct position {
  int row;  // vertical offset in mi units
  int col;  // horizontal offset in mi units
} POSITION;
// clamp_mv_ref
#define MV_BORDER (16 << 3)  // Allow 16 pels in 1/8th pel units
staticinlineint get_relative_dist(const OrderHintInfo *oh, int a, int b) { if (!oh->enable_order_hint) return 0;
// Checks that the given mi_row, mi_col and search point // are inside the borders of the tile. staticinlineint is_inside(const TileInfo *const tile, int mi_col, int mi_row, const POSITION *mi_pos) { return !(mi_row + mi_pos->row < tile->mi_row_start ||
mi_col + mi_pos->col < tile->mi_col_start ||
mi_row + mi_pos->row >= tile->mi_row_end ||
mi_col + mi_pos->col >= tile->mi_col_end);
}
// NOTE: Following reference frame pairs are not supported to be explicitly // signalled, but they are possibly chosen by the use of skip_mode, // which may use the most recent one-sided reference frame pair.
{ LAST2_FRAME, LAST3_FRAME }, { LAST2_FRAME, GOLDEN_FRAME },
{ LAST3_FRAME, GOLDEN_FRAME }, {BWDREF_FRAME, ALTREF2_FRAME},
{ ALTREF2_FRAME, ALTREF_FRAME }
}; // clang-format on
// Above neighbor if (above_in_image && is_inter_block(above_mbmi)) {
ref_counts[above_mbmi->ref_frame[0]]++; if (has_second_ref(above_mbmi)) {
ref_counts[above_mbmi->ref_frame[1]]++;
}
}
// Left neighbor if (left_in_image && is_inter_block(left_mbmi)) {
ref_counts[left_mbmi->ref_frame[0]]++; if (has_second_ref(left_mbmi)) {
ref_counts[left_mbmi->ref_frame[1]]++;
}
}
}
void av1_copy_frame_mvs(const AV1_COMMON *const cm, const MB_MODE_INFO *const mi, int mi_row, int mi_col, int x_mis, int y_mis);
// The global_mvs output parameter points to an array of REF_FRAMES elements. // The caller may pass a null global_mvs if it does not need the global_mvs // output. void av1_find_mv_refs(const AV1_COMMON *cm, const MACROBLOCKD *xd,
MB_MODE_INFO *mi, MV_REFERENCE_FRAME ref_frame,
uint8_t ref_mv_count[MODE_CTX_REF_FRAMES],
CANDIDATE_MV ref_mv_stack[][MAX_REF_MV_STACK_SIZE],
uint16_t ref_mv_weight[][MAX_REF_MV_STACK_SIZE],
int_mv mv_ref_list[][MAX_MV_REF_CANDIDATES],
int_mv *global_mvs, int16_t *mode_context);
// check a list of motion vectors by sad score using a number rows of pixels // above and a number cols of pixels in the left to select the one with best // score to use as ref motion vector void av1_find_best_ref_mvs(int allow_hp, int_mv *mvlist, int_mv *nearest_mv,
int_mv *near_mv, int is_integer);
uint8_t av1_selectSamples(MV *mv, int *pts, int *pts_inref, int len,
BLOCK_SIZE bsize);
uint8_t av1_findSamples(const AV1_COMMON *cm, MACROBLOCKD *xd, int *pts, int *pts_inref);
// Wavefront constraint: use only top left area of frame for reference. constint gradient = 1 + INTRABC_DELAY_SB64 + (sb_size > 64); constint wf_offset = gradient * (active_sb_row - src_sb_row); if (src_sb_row > active_sb_row ||
src_sb64_col >= active_sb64_col - INTRABC_DELAY_SB64 + wf_offset) return 0;
return 1;
}
#ifdef __cplusplus
}  // extern "C"
#endif

#endif  // AOM_AV1_COMMON_MVREF_COMMON_H_
Messung V0.5
¤ Dauer der Verarbeitung: 0.13 Sekunden
(vorverarbeitet)
¤
Die Informationen auf dieser Webseite wurden
nach bestem Wissen sorgfältig zusammengestellt. Es wird jedoch weder Vollständigkeit, noch Richtigkeit,
noch Qualität der bereit gestellten Informationen zugesichert.
Bemerkung:
Die farbliche Syntaxdarstellung und die Messung sind noch experimentell.