ref: f8df9169311120964e3dbc22b62db114ab050fbb
parent: 688c99a7066df47e1f7490519248ea0572dae5d7
parent: 1bfacd3529d1297f3af3796f5801f47c0b4c03cb
	author: Alex Converse <aconverse@google.com>
	date: Fri Mar 13 06:20:15 EDT 2015
	
Merge "Reconcile active_map and cyclic refresh"
--- a/vp9/encoder/vp9_aq_cyclicrefresh.c
+++ b/vp9/encoder/vp9_aq_cyclicrefresh.c
@@ -214,7 +214,7 @@
   // If this block is labeled for refresh, check if we should reset the
   // segment_id.
-  if (mbmi->segment_id != CR_SEGMENT_ID_BASE)
+  if (cyclic_refresh_segment_id_boosted(mbmi->segment_id))
     mbmi->segment_id = refresh_this_block;
 
   // Update the cyclic refresh map, to be used for setting segmentation map
@@ -221,7 +221,7 @@
   // for the next frame. If the block will be refreshed this frame, mark it
   // as clean. The magnitude of the -ve influences how long before we consider
   // it for refresh again.
-  if (mbmi->segment_id != CR_SEGMENT_ID_BASE) {
+  if (cyclic_refresh_segment_id_boosted(mbmi->segment_id)) {
     new_map_value = -cr->time_for_refresh;
   } else if (refresh_this_block) {
     // Else if it is accepted as candidate for refresh, and has not already
@@ -252,10 +252,11 @@
   int mi_row, mi_col;
   cr->actual_num_seg_blocks = 0;
   for (mi_row = 0; mi_row < cm->mi_rows; mi_row++)
-  for (mi_col = 0; mi_col < cm->mi_cols; mi_col++) {
-    if (seg_map[mi_row * cm->mi_cols + mi_col] != CR_SEGMENT_ID_BASE)
-      cr->actual_num_seg_blocks++;
-  }
+    for (mi_col = 0; mi_col < cm->mi_cols; mi_col++) {
+      if (cyclic_refresh_segment_id_boosted(
+              seg_map[mi_row * cm->mi_cols + mi_col]))
+        cr->actual_num_seg_blocks++;
+    }
 }
 
 // Set golden frame update interval, for non-svc 1 pass CBR mode.
--- a/vp9/encoder/vp9_aq_cyclicrefresh.h
+++ b/vp9/encoder/vp9_aq_cyclicrefresh.h
@@ -78,6 +78,11 @@
 int vp9_cyclic_refresh_get_rdmult(const CYCLIC_REFRESH *cr);
 
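+// Returns 1 when the segment id is one of the two segments boosted by
+// cyclic refresh (CR_SEGMENT_ID_BOOST1 or CR_SEGMENT_ID_BOOST2).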
+static INLINE int cyclic_refresh_segment_id_boosted(int segment_id) {
+  return segment_id == CR_SEGMENT_ID_BOOST1 ||
+         segment_id == CR_SEGMENT_ID_BOOST2;
+}
+
 #ifdef __cplusplus
 }  // extern "C"
 #endif
--- a/vp9/encoder/vp9_encodeframe.c
+++ b/vp9/encoder/vp9_encodeframe.c
@@ -416,7 +416,7 @@
   tree_to_node(data, bsize, &vt);
 
   // No 64x64 blocks on segments other than base (un-boosted) segment.
-  if (segment_id != CR_SEGMENT_ID_BASE && bsize == BLOCK_64X64)
+  if (cyclic_refresh_segment_id_boosted(segment_id) && bsize == BLOCK_64X64)
     return 0;
 
   // For bsize=bsize_min (16x16/8x8 for 8x8/4x4 downsampling), select if
--- a/vp9/encoder/vp9_encoder.c
+++ b/vp9/encoder/vp9_encoder.c
@@ -52,6 +52,8 @@
#include "vp9/encoder/vp9_svc_layercontext.h"
#include "vp9/encoder/vp9_skin_detection.h"
+#define AM_SEGMENT_ID_INACTIVE 7
+#define AM_SEGMENT_ID_ACTIVE 0
+
 #define SHARP_FILTER_QTHRESH 0 /* Q threshold for 8-tap sharp filter */
@@ -105,6 +107,71 @@
   }
 }
 
+// Mark all inactive blocks as active. Other segmentation features may be
+// set, so memset cannot be used; instead, only inactive blocks are reset.
+void vp9_suppress_active_map(VP9_COMP *cpi) {
+  unsigned char *const seg_map = cpi->segmentation_map;
+  int i;
+  if (cpi->active_map.enabled || cpi->active_map.update)
+    for (i = 0; i < cpi->common.mi_rows * cpi->common.mi_cols; ++i)
+      if (seg_map[i] == AM_SEGMENT_ID_INACTIVE)
+        seg_map[i] = AM_SEGMENT_ID_ACTIVE;
+}
+
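+// Fold the user-supplied map into the segmentation map: blocks still
+// carrying the active id take their id from the user map, and SEG_LVL_SKIP
+// is enabled on the inactive segment (or disabled when the map is off).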
+void vp9_apply_active_map(VP9_COMP *cpi) {
+  struct segmentation *const seg = &cpi->common.seg;
+  unsigned char *const seg_map = cpi->segmentation_map;
+  const unsigned char *const active_map = cpi->active_map.map;
+  int i;
+
+  assert(AM_SEGMENT_ID_ACTIVE == CR_SEGMENT_ID_BASE);
+
+  if (cpi->active_map.update) {
+    if (cpi->active_map.enabled) {
+      for (i = 0; i < cpi->common.mi_rows * cpi->common.mi_cols; ++i)
+        if (seg_map[i] == AM_SEGMENT_ID_ACTIVE) seg_map[i] = active_map[i];
+      vp9_enable_segmentation(seg);
+      vp9_enable_segfeature(seg, AM_SEGMENT_ID_INACTIVE, SEG_LVL_SKIP);
+    } else {
+      vp9_disable_segfeature(seg, AM_SEGMENT_ID_INACTIVE, SEG_LVL_SKIP);
+      if (seg->enabled) {
+        seg->update_data = 1;
+        seg->update_map = 1;
+      }
+    }
+    cpi->active_map.update = 0;
+  }
+}
+
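+// The caller's map is in 16x16 macroblock units; expand it to the encoder's
+// 8x8 mi grid (each 16x16 cell covers a 2x2 block of mi units, hence the
+// r >> 1 / c >> 1 indexing). Returns -1 if the dimensions do not match.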
+int vp9_set_active_map(VP9_COMP *cpi,
+                       unsigned char *new_map_16x16,
+                       int rows,
+                       int cols) {
+  if (rows == cpi->common.mb_rows && cols == cpi->common.mb_cols) {
+    unsigned char *const active_map_8x8 = cpi->active_map.map;
+    const int mi_rows = cpi->common.mi_rows;
+    const int mi_cols = cpi->common.mi_cols;
+    cpi->active_map.update = 1;
+    if (new_map_16x16) {
+      int r, c;
+      for (r = 0; r < mi_rows; ++r) {
+        for (c = 0; c < mi_cols; ++c) {
+          active_map_8x8[r * mi_cols + c] =
+              new_map_16x16[(r >> 1) * cols + (c >> 1)]
+                  ? AM_SEGMENT_ID_ACTIVE
+                  : AM_SEGMENT_ID_INACTIVE;
+        }
+      }
+      cpi->active_map.enabled = 1;
+    } else {
+      cpi->active_map.enabled = 0;
+    }
+    return 0;
+  } else {
+    return -1;
+  }
+}
+
+
 void vp9_set_high_precision_mv(VP9_COMP *cpi, int allow_high_precision_mv) {
   MACROBLOCK *const mb = &cpi->td.mb;
   cpi->common.allow_high_precision_mv = allow_high_precision_mv;
@@ -233,6 +300,9 @@
   vp9_cyclic_refresh_free(cpi->cyclic_refresh);
   cpi->cyclic_refresh = NULL;
 
+  vpx_free(cpi->active_map.map);
+  cpi->active_map.map = NULL;
+
   vp9_free_ref_frame_buffers(cm);
   vp9_free_context_buffers(cm);
@@ -1429,6 +1499,7 @@
   cpi->partition_search_skippable_frame = 0;
   cpi->tile_data = NULL;
 
+  // TODO(aconverse): Realloc these tables on frame resize
   // Create the encoder segmentation map and set all entries to 0
   CHECK_MEM_ERROR(cm, cpi->segmentation_map,
                   vpx_calloc(cm->mi_rows * cm->mi_cols, 1));
@@ -1437,6 +1508,9 @@
   CHECK_MEM_ERROR(cm, cpi->cyclic_refresh,
                   vp9_cyclic_refresh_alloc(cm->mi_rows, cm->mi_cols));
 
+  CHECK_MEM_ERROR(cm, cpi->active_map.map,
+                  vpx_calloc(cm->mi_rows * cm->mi_cols, 1));
+
   // And a place holder structure is the coding context
   // for use if we want to save and restore it
   CHECK_MEM_ERROR(cm, cpi->coding_context.last_frame_seg_map_copy,
@@ -2831,6 +2905,7 @@
   setup_frame(cpi);
 
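+  // Clear active-map segment ids before AQ setup writes the segmentation
+  // map; vp9_apply_active_map() below folds the user map back in.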
+  vp9_suppress_active_map(cpi);
   // Variance adaptive and in frame q adjustment experiments are mutually
   // exclusive.
   if (cpi->oxcf.aq_mode == VARIANCE_AQ) {
@@ -2840,6 +2915,8 @@
   } else if (cpi->oxcf.aq_mode == CYCLIC_REFRESH_AQ) {
     vp9_cyclic_refresh_setup(cpi);
   }
 
+  vp9_apply_active_map(cpi);
+
   // transform / motion compensation build reconstruction frame
   vp9_encode_frame(cpi);
@@ -4083,29 +4160,6 @@
 #endif  // !CONFIG_VP9_POSTPROC
     vp9_clear_system_state();
     return ret;
-  }
-}
-
-int vp9_set_active_map(VP9_COMP *cpi, unsigned char *map, int rows, int cols) {
-  if (rows == cpi->common.mb_rows && cols == cpi->common.mb_cols) {
-    const int mi_rows = cpi->common.mi_rows;
-    const int mi_cols = cpi->common.mi_cols;
-    if (map) {
-      int r, c;
-      for (r = 0; r < mi_rows; r++) {
-        for (c = 0; c < mi_cols; c++) {
-          cpi->segmentation_map[r * mi_cols + c] =
-              !map[(r >> 1) * cols + (c >> 1)];
-        }
-      }
-      vp9_enable_segfeature(&cpi->common.seg, 1, SEG_LVL_SKIP);
-      vp9_enable_segmentation(&cpi->common.seg);
-    } else {
-      vp9_disable_segmentation(&cpi->common.seg);
-    }
-    return 0;
-  } else {
-    return -1;
   }
 }
--- a/vp9/encoder/vp9_encoder.h
+++ b/vp9/encoder/vp9_encoder.h
@@ -266,6 +266,12 @@
 struct EncWorkerData;
 
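+// Application-supplied active map state: `map` holds per-mi-unit segment
+// ids, `enabled` says a map is in force, and `update` requests that
+// vp9_apply_active_map fold it into the segmentation map next frame.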
+typedef struct ActiveMap {
+  int enabled;
+  int update;
+  unsigned char *map;
+} ActiveMap;
+
 typedef struct VP9_COMP {
   QUANTS quants;
   ThreadData td;
@@ -358,6 +364,7 @@
   int segment_encode_breakout[MAX_SEGMENTS];
 
   CYCLIC_REFRESH *cyclic_refresh;
+  ActiveMap active_map;
 
   fractional_mv_step_fp *find_fractional_mv_step;
   vp9_full_search_fn_t full_search_sad;
--- a/vp9/encoder/vp9_pickmode.c
+++ b/vp9/encoder/vp9_pickmode.c
@@ -232,7 +232,7 @@
     if (cpi->sf.partition_search_type == VAR_BASED_PARTITION) {
       if (cpi->oxcf.aq_mode == CYCLIC_REFRESH_AQ &&
-          xd->mi[0].src_mi->mbmi.segment_id != CR_SEGMENT_ID_BASE)
+          cyclic_refresh_segment_id_boosted(xd->mi[0].src_mi->mbmi.segment_id))
         xd->mi[0].src_mi->mbmi.tx_size = TX_8X8;
       else if (xd->mi[0].src_mi->mbmi.tx_size > TX_16X16)
         xd->mi[0].src_mi->mbmi.tx_size = TX_16X16;
--