Subversion Repositories Kolibri OS

Rev

Blame | Last modification | View Log | RSS feed

  1. /*
  2.  * Copyright © 2010 Intel Corporation
  3.  *
  4.  * Permission is hereby granted, free of charge, to any person obtaining a
  5.  * copy of this software and associated documentation files (the
  6.  * "Software"), to deal in the Software without restriction, including
  7.  * without limitation the rights to use, copy, modify, merge, publish,
  8.  * distribute, sub license, and/or sell copies of the Software, and to
  9.  * permit persons to whom the Software is furnished to do so, subject to
  10.  * the following conditions:
  11.  *
  12.  * The above copyright notice and this permission notice (including the
  13.  * next paragraph) shall be included in all copies or substantial portions
  14.  * of the Software.
  15.  *
  16.  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
  17.  * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
  18.  * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
  19.  * IN NO EVENT SHALL PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR
  20.  * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
  21.  * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
  22.  * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
  23.  *
  24.  * Authors:
  25.  *    Xiang Haihao <haihao.xiang@intel.com>
  26.  *
  27.  */
  28. #include "sysdeps.h"
  29.  
  30. #include "intel_batchbuffer.h"
  31. #include "intel_driver.h"
  32.  
  33. #include "i965_defines.h"
  34. #include "i965_drv_video.h"
  35. #include "i965_avc_bsd.h"
  36. #include "i965_media_h264.h"
  37. #include "i965_media.h"
  38. #include "i965_decoder_utils.h"
  39. #include "intel_media.h"
  40.  
  41. static void
  42. i965_avc_bsd_init_avc_bsd_surface(VADriverContextP ctx,
  43.                                   struct object_surface *obj_surface,
  44.                                   VAPictureParameterBufferH264 *pic_param,
  45.                                   struct i965_h264_context *i965_h264_context)
  46. {
  47.     struct i965_driver_data *i965 = i965_driver_data(ctx);
  48.     GenAvcSurface *avc_bsd_surface = obj_surface->private_data;
  49.  
  50.     obj_surface->free_private_data = gen_free_avc_surface;
  51.  
  52.     if (!avc_bsd_surface) {
  53.         avc_bsd_surface = calloc(sizeof(GenAvcSurface), 1);
  54.         assert(avc_bsd_surface);
  55.         avc_bsd_surface->base.frame_store_id = -1;
  56.         assert((obj_surface->size & 0x3f) == 0);
  57.         obj_surface->private_data = avc_bsd_surface;
  58.     }
  59.  
  60.     avc_bsd_surface->dmv_bottom_flag = (pic_param->pic_fields.bits.field_pic_flag &&
  61.                                         !pic_param->seq_fields.bits.direct_8x8_inference_flag);
  62.  
  63.     if (avc_bsd_surface->dmv_top == NULL) {
  64.         avc_bsd_surface->dmv_top = dri_bo_alloc(i965->intel.bufmgr,
  65.                                                 "direct mv w/r buffer",
  66.                                                 DMV_SIZE,
  67.                                                 0x1000);
  68.     }
  69.  
  70.     if (avc_bsd_surface->dmv_bottom_flag &&
  71.         avc_bsd_surface->dmv_bottom == NULL) {
  72.         avc_bsd_surface->dmv_bottom = dri_bo_alloc(i965->intel.bufmgr,
  73.                                                    "direct mv w/r buffer",
  74.                                                    DMV_SIZE,
  75.                                                    0x1000);
  76.     }
  77. }
  78.  
  79. static void
  80. i965_bsd_ind_obj_base_address(VADriverContextP ctx,
  81.                               struct decode_state *decode_state,
  82.                               int slice,
  83.                               struct i965_h264_context *i965_h264_context)
  84.                              
  85. {
  86.     struct intel_batchbuffer *batch = i965_h264_context->batch;
  87.  
  88.     dri_bo *ind_bo = decode_state->slice_datas[slice]->bo;
  89.  
  90.     BEGIN_BCS_BATCH(batch, 3);
  91.     OUT_BCS_BATCH(batch, CMD_BSD_IND_OBJ_BASE_ADDR | (3 - 2));
  92.     OUT_BCS_RELOC(batch, ind_bo,
  93.                   I915_GEM_DOMAIN_INSTRUCTION, 0,
  94.                   0);
  95.     OUT_BCS_BATCH(batch, 0);
  96.     ADVANCE_BCS_BATCH(batch);
  97. }
  98.  
/*
 * Emit AVC_BSD_IMG_STATE (6 DWs): per-picture decoding parameters for
 * the BSD unit, packed from the VA H.264 picture parameter buffer.
 * Also validates (via assert) the spec-mandated relationships between
 * field/frame/MBAFF flags and the chroma format restrictions of the
 * BSD engine.
 */
static void
i965_avc_bsd_img_state(VADriverContextP ctx,
                       struct decode_state *decode_state,
                       struct i965_h264_context *i965_h264_context)
{
    struct intel_batchbuffer *batch = i965_h264_context->batch;
    int qm_present_flag;
    int img_struct;
    int mbaff_frame_flag;
    unsigned int avc_it_command_header;
    unsigned int width_in_mbs, height_in_mbs;
    VAPictureParameterBufferH264 *pic_param;

    /* user-supplied IQ matrices present, or fall back to built-ins */
    if (decode_state->iq_matrix && decode_state->iq_matrix->buffer)
        qm_present_flag = 1;
    else
        qm_present_flag = 0; /* built-in QM matrices */

    assert(decode_state->pic_param && decode_state->pic_param->buffer);
    pic_param = (VAPictureParameterBufferH264 *)decode_state->pic_param->buffer;

    /* image structure code: 0 = frame, 1 = top field, 3 = bottom field */
    if (pic_param->CurrPic.flags & VA_PICTURE_H264_TOP_FIELD)
        img_struct = 1;
    else if (pic_param->CurrPic.flags & VA_PICTURE_H264_BOTTOM_FIELD)
        img_struct = 3;
    else
        img_struct = 0;

    /* field pictures must carry field_pic_flag, frames must not */
    if ((img_struct & 0x1) == 0x1) {
        assert(pic_param->pic_fields.bits.field_pic_flag == 0x1);
    } else {
        assert(pic_param->pic_fields.bits.field_pic_flag == 0x0);
    }

    if (pic_param->seq_fields.bits.frame_mbs_only_flag) { /* a frame containing only frame macroblocks */
        assert(pic_param->seq_fields.bits.mb_adaptive_frame_field_flag == 0);
        assert(pic_param->pic_fields.bits.field_pic_flag == 0);
    } else {
        assert(pic_param->seq_fields.bits.direct_8x8_inference_flag == 1); /* see H.264 spec */
    }

    /* MBAFF frame: MB-adaptive frame/field coding on a frame picture */
    mbaff_frame_flag = (pic_param->seq_fields.bits.mb_adaptive_frame_field_flag &&
                        !pic_param->pic_fields.bits.field_pic_flag);

    width_in_mbs = ((pic_param->picture_width_in_mbs_minus1 + 1) & 0xff);
    height_in_mbs = ((pic_param->picture_height_in_mbs_minus1 + 1) & 0xff); /* frame height */

    assert(!((width_in_mbs * height_in_mbs) & 0x8000)); /* hardware requirement */

    /* BSD unit doesn't support 4:2:2 and 4:4:4 picture */
    assert(pic_param->seq_fields.bits.chroma_format_idc == 0 || /* monochrome picture */
           pic_param->seq_fields.bits.chroma_format_idc == 1);  /* 4:2:0 */
    assert(pic_param->seq_fields.bits.residual_colour_transform_flag == 0); /* only available for 4:4:4 */

    /* header DW the BSD unit prepends to each AVC_IT command it generates */
    avc_it_command_header = (CMD_MEDIA_OBJECT_EX | (12 - 2));

    BEGIN_BCS_BATCH(batch, 6);
    OUT_BCS_BATCH(batch, CMD_AVC_BSD_IMG_STATE | (6 - 2));
    /* DW1: total macroblock count of the picture */
    OUT_BCS_BATCH(batch,
                  ((width_in_mbs * height_in_mbs) & 0x7fff));
    /* DW2: picture dimensions in macroblocks */
    OUT_BCS_BATCH(batch,
                  (height_in_mbs << 16) |
                  (width_in_mbs << 0));
    /* DW3: chroma QP offsets, scan orders, QM presence, image structure */
    OUT_BCS_BATCH(batch,
                  ((pic_param->second_chroma_qp_index_offset & 0x1f) << 24) |
                  ((pic_param->chroma_qp_index_offset & 0x1f) << 16) |
                  (SCAN_RASTER_ORDER << 15) | /* AVC ILDB Data */
                  (SCAN_SPECIAL_ORDER << 14) | /* AVC IT Command */
                  (SCAN_RASTER_ORDER << 13) | /* AVC IT Data */
                  (1 << 12) | /* always 1, hardware requirement */
                  (qm_present_flag << 10) |
                  (img_struct << 8) |
                  (16 << 0)); /* FIXME: always support 16 reference frames ??? */
    /* DW4: sequence/picture coding flags */
    OUT_BCS_BATCH(batch,
                  (RESIDUAL_DATA_OFFSET << 24) | /* residual data offset */
                  (0 << 17) | /* don't overwrite SRT */
                  (0 << 16) | /* Un-SRT (Unsynchronized Root Thread) */
                  (0 << 12) | /* FIXME: no 16MV ??? */
                  (pic_param->seq_fields.bits.chroma_format_idc << 10) |
                  (i965_h264_context->enable_avc_ildb << 8)  | /* Enable ILDB writing output */
                  (pic_param->pic_fields.bits.entropy_coding_mode_flag << 7) |
                  ((!pic_param->pic_fields.bits.reference_pic_flag) << 6) |
                  (pic_param->pic_fields.bits.constrained_intra_pred_flag << 5) |
                  (pic_param->seq_fields.bits.direct_8x8_inference_flag << 4) |
                  (pic_param->pic_fields.bits.transform_8x8_mode_flag << 3) |
                  (pic_param->seq_fields.bits.frame_mbs_only_flag << 2) |
                  (mbaff_frame_flag << 1) |
                  (pic_param->pic_fields.bits.field_pic_flag << 0));
    /* DW5: AVC_IT command header template */
    OUT_BCS_BATCH(batch, avc_it_command_header);
    ADVANCE_BCS_BATCH(batch);
}
  190.  
  191. static void
  192. i965_avc_bsd_qm_state(VADriverContextP ctx,
  193.                       struct decode_state *decode_state,
  194.                       struct i965_h264_context *i965_h264_context)
  195. {
  196.     struct intel_batchbuffer *batch = i965_h264_context->batch;
  197.     int cmd_len;
  198.     VAIQMatrixBufferH264 *iq_matrix;
  199.     VAPictureParameterBufferH264 *pic_param;
  200.  
  201.     if (!decode_state->iq_matrix || !decode_state->iq_matrix->buffer)
  202.         return;
  203.  
  204.     iq_matrix = (VAIQMatrixBufferH264 *)decode_state->iq_matrix->buffer;
  205.  
  206.     assert(decode_state->pic_param && decode_state->pic_param->buffer);
  207.     pic_param = (VAPictureParameterBufferH264 *)decode_state->pic_param->buffer;
  208.  
  209.     cmd_len = 2 + 6 * 4; /* always load six 4x4 scaling matrices */
  210.  
  211.     if (pic_param->pic_fields.bits.transform_8x8_mode_flag)
  212.         cmd_len += 2 * 16; /* load two 8x8 scaling matrices */
  213.  
  214.     BEGIN_BCS_BATCH(batch, cmd_len);
  215.     OUT_BCS_BATCH(batch, CMD_AVC_BSD_QM_STATE | (cmd_len - 2));
  216.  
  217.     if (pic_param->pic_fields.bits.transform_8x8_mode_flag)
  218.         OUT_BCS_BATCH(batch,
  219.                       (0x0  << 8) | /* don't use default built-in matrices */
  220.                       (0xff << 0)); /* six 4x4 and two 8x8 scaling matrices */
  221.     else
  222.         OUT_BCS_BATCH(batch,
  223.                       (0x0  << 8) | /* don't use default built-in matrices */
  224.                       (0x3f << 0)); /* six 4x4 scaling matrices */
  225.  
  226.     intel_batchbuffer_data(batch, &iq_matrix->ScalingList4x4[0][0], 6 * 4 * 4);
  227.  
  228.     if (pic_param->pic_fields.bits.transform_8x8_mode_flag)
  229.         intel_batchbuffer_data(batch, &iq_matrix->ScalingList8x8[0][0], 2 * 16 * 4);
  230.  
  231.     ADVANCE_BCS_BATCH(batch);
  232. }
  233.  
  234. static void
  235. i965_avc_bsd_slice_state(VADriverContextP ctx,
  236.                          VAPictureParameterBufferH264 *pic_param,
  237.                          VASliceParameterBufferH264 *slice_param,
  238.                          struct i965_h264_context *i965_h264_context)
  239. {
  240.     struct intel_batchbuffer *batch = i965_h264_context->batch;
  241.     int present_flag, cmd_len, list, j;
  242.     uint8_t ref_idx_state[32];
  243.     char weightoffsets[32 * 6];
  244.  
  245.     /* don't issue SLICE_STATE for intra-prediction decoding */
  246.     if (slice_param->slice_type == SLICE_TYPE_I ||
  247.         slice_param->slice_type == SLICE_TYPE_SI)
  248.         return;
  249.  
  250.     cmd_len = 2;
  251.  
  252.     if (slice_param->slice_type == SLICE_TYPE_P ||
  253.         slice_param->slice_type == SLICE_TYPE_SP) {
  254.         present_flag = PRESENT_REF_LIST0;
  255.         cmd_len += 8;
  256.     } else {
  257.         present_flag = PRESENT_REF_LIST0 | PRESENT_REF_LIST1;
  258.         cmd_len += 16;
  259.     }
  260.  
  261.     if ((slice_param->slice_type == SLICE_TYPE_P ||
  262.          slice_param->slice_type == SLICE_TYPE_SP) &&
  263.         (pic_param->pic_fields.bits.weighted_pred_flag == 1)) {
  264.         present_flag |= PRESENT_WEIGHT_OFFSET_L0;
  265.         cmd_len += 48;
  266.     }
  267.  
  268.     if ((slice_param->slice_type == SLICE_TYPE_B) &&
  269.         (pic_param->pic_fields.bits.weighted_bipred_idc == 1)) {
  270.         present_flag |= PRESENT_WEIGHT_OFFSET_L0 | PRESENT_WEIGHT_OFFSET_L1;
  271.         cmd_len += 96;
  272.     }
  273.  
  274.     BEGIN_BCS_BATCH(batch, cmd_len);
  275.     OUT_BCS_BATCH(batch, CMD_AVC_BSD_SLICE_STATE | (cmd_len - 2));
  276.     OUT_BCS_BATCH(batch, present_flag);
  277.  
  278.     for (list = 0; list < 2; list++) {
  279.         int flag, num_va_pics;
  280.         VAPictureH264 *va_pic;
  281.  
  282.         if (list == 0) {
  283.             flag        = PRESENT_REF_LIST0;
  284.             va_pic      = slice_param->RefPicList0;
  285.             num_va_pics = slice_param->num_ref_idx_l0_active_minus1 + 1;
  286.         } else {
  287.             flag        = PRESENT_REF_LIST1;
  288.             va_pic      = slice_param->RefPicList1;
  289.             num_va_pics = slice_param->num_ref_idx_l1_active_minus1 + 1;
  290.         }
  291.  
  292.         if (!(present_flag & flag))
  293.             continue;
  294.  
  295.         gen5_fill_avc_ref_idx_state(
  296.             ref_idx_state,
  297.             va_pic, num_va_pics,
  298.             i965_h264_context->fsid_list
  299.         );            
  300.         intel_batchbuffer_data(batch, ref_idx_state, sizeof(ref_idx_state));
  301.     }
  302.  
  303.     i965_h264_context->weight128_luma_l0 = 0;
  304.     i965_h264_context->weight128_luma_l1 = 0;
  305.     i965_h264_context->weight128_chroma_l0 = 0;
  306.     i965_h264_context->weight128_chroma_l1 = 0;
  307.  
  308.     i965_h264_context->weight128_offset0_flag = 0;
  309.     i965_h264_context->weight128_offset0 = 0;
  310.  
  311.     if (present_flag & PRESENT_WEIGHT_OFFSET_L0) {
  312.         for (j = 0; j < 32; j++) {
  313.             weightoffsets[j * 6 + 0] = slice_param->luma_offset_l0[j];
  314.             weightoffsets[j * 6 + 1] = slice_param->luma_weight_l0[j];
  315.             weightoffsets[j * 6 + 2] = slice_param->chroma_offset_l0[j][0];
  316.             weightoffsets[j * 6 + 3] = slice_param->chroma_weight_l0[j][0];
  317.             weightoffsets[j * 6 + 4] = slice_param->chroma_offset_l0[j][1];
  318.             weightoffsets[j * 6 + 5] = slice_param->chroma_weight_l0[j][1];
  319.  
  320.             if (pic_param->pic_fields.bits.weighted_pred_flag == 1 ||
  321.                 pic_param->pic_fields.bits.weighted_bipred_idc == 1) {
  322.                 if (i965_h264_context->use_hw_w128) {
  323.                     if (slice_param->luma_weight_l0[j] == 128)
  324.                         i965_h264_context->weight128_luma_l0 |= (1 << j);
  325.  
  326.                     if (slice_param->chroma_weight_l0[j][0] == 128 ||
  327.                         slice_param->chroma_weight_l0[j][1] == 128)
  328.                         i965_h264_context->weight128_chroma_l0 |= (1 << j);
  329.                 } else {
  330.                     /* FIXME: workaround for weight 128 */
  331.                     if (slice_param->luma_weight_l0[j] == 128 ||
  332.                         slice_param->chroma_weight_l0[j][0] == 128 ||
  333.                         slice_param->chroma_weight_l0[j][1] == 128)
  334.                         i965_h264_context->weight128_offset0_flag = 1;
  335.                 }
  336.             }
  337.         }
  338.  
  339.         intel_batchbuffer_data(batch, weightoffsets, sizeof(weightoffsets));
  340.     }
  341.  
  342.     if (present_flag & PRESENT_WEIGHT_OFFSET_L1) {
  343.         for (j = 0; j < 32; j++) {
  344.             weightoffsets[j * 6 + 0] = slice_param->luma_offset_l1[j];
  345.             weightoffsets[j * 6 + 1] = slice_param->luma_weight_l1[j];
  346.             weightoffsets[j * 6 + 2] = slice_param->chroma_offset_l1[j][0];
  347.             weightoffsets[j * 6 + 3] = slice_param->chroma_weight_l1[j][0];
  348.             weightoffsets[j * 6 + 4] = slice_param->chroma_offset_l1[j][1];
  349.             weightoffsets[j * 6 + 5] = slice_param->chroma_weight_l1[j][1];
  350.  
  351.             if (pic_param->pic_fields.bits.weighted_bipred_idc == 1) {
  352.                 if (i965_h264_context->use_hw_w128) {
  353.                     if (slice_param->luma_weight_l1[j] == 128)
  354.                         i965_h264_context->weight128_luma_l1 |= (1 << j);
  355.  
  356.                     if (slice_param->chroma_weight_l1[j][0] == 128 ||
  357.                         slice_param->chroma_weight_l1[j][1] == 128)
  358.                         i965_h264_context->weight128_chroma_l1 |= (1 << j);
  359.                 } else {
  360.                     if (slice_param->luma_weight_l0[j] == 128 ||
  361.                         slice_param->chroma_weight_l0[j][0] == 128 ||
  362.                         slice_param->chroma_weight_l0[j][1] == 128)
  363.                         i965_h264_context->weight128_offset0_flag = 1;
  364.                 }
  365.             }
  366.         }
  367.  
  368.         intel_batchbuffer_data(batch, weightoffsets, sizeof(weightoffsets));
  369.     }
  370.  
  371.     ADVANCE_BCS_BATCH(batch);
  372. }
  373.  
/*
 * Emit AVC_BSD_BUF_BASE_STATE (74 DWs): all buffer base addresses the
 * BSD unit needs — row stores, AVC_IT command/data buffers, optional
 * ILDB output, direct-MV buffers for every frame store and for the
 * current picture, and the POC list.
 *
 * Side effects: may allocate the render surface's bo, clears the UV
 * plane for monochrome streams, and (re)initializes the surface's
 * GenAvcSurface private data.
 */
static void
i965_avc_bsd_buf_base_state(VADriverContextP ctx,
                            struct decode_state *decode_state,
                            VAPictureParameterBufferH264 *pic_param,
                            VASliceParameterBufferH264 *slice_param,
                            struct i965_h264_context *i965_h264_context)
{
    struct intel_batchbuffer *batch = i965_h264_context->batch;
    struct i965_avc_bsd_context *i965_avc_bsd_context;
    int i;
    VAPictureH264 *va_pic;
    struct object_surface *obj_surface;
    GenAvcSurface *avc_bsd_surface;

    i965_avc_bsd_context = &i965_h264_context->i965_avc_bsd_context;

    BEGIN_BCS_BATCH(batch, 74);
    OUT_BCS_BATCH(batch, CMD_AVC_BSD_BUF_BASE_STATE | (74 - 2));
    /* BSD row store (read/write by the GPU) */
    OUT_BCS_RELOC(batch, i965_avc_bsd_context->bsd_raw_store.bo,
                  I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
                  0);
    /* MPR row store */
    OUT_BCS_RELOC(batch, i965_avc_bsd_context->mpr_row_store.bo,
                  I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
                  0);
    /* AVC_IT command buffer; offset skips scoreboard MB commands when in use */
    OUT_BCS_RELOC(batch, i965_h264_context->avc_it_command_mb_info.bo,
                  I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
                  i965_h264_context->avc_it_command_mb_info.mbs * i965_h264_context->use_avc_hw_scoreboard * MB_CMD_IN_BYTES);
    /* AVC_IT data buffer; write offset is in 64-byte units */
    OUT_BCS_RELOC(batch, i965_h264_context->avc_it_data.bo,
                  I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
                  (i965_h264_context->avc_it_data.write_offset << 6));

    /* ILDB (deblocking) output buffer, only when ILDB is enabled */
    if (i965_h264_context->enable_avc_ildb)
        OUT_BCS_RELOC(batch, i965_h264_context->avc_ildb_data.bo,
                      I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
                      0);
    else
        OUT_BCS_BATCH(batch, 0);

    /* two DWs per frame store: top and bottom direct-MV buffers;
     * the top buffer is repeated when no separate bottom buffer exists */
    for (i = 0; i < ARRAY_ELEMS(i965_h264_context->fsid_list); i++) {
        obj_surface = i965_h264_context->fsid_list[i].obj_surface;
        if (obj_surface && obj_surface->private_data) {
            avc_bsd_surface = obj_surface->private_data;

            OUT_BCS_RELOC(batch, avc_bsd_surface->dmv_top,
                          I915_GEM_DOMAIN_INSTRUCTION, 0,
                          0);

            if (avc_bsd_surface->dmv_bottom_flag == 1)
                OUT_BCS_RELOC(batch, avc_bsd_surface->dmv_bottom,
                              I915_GEM_DOMAIN_INSTRUCTION, 0,
                              0);
            else
                OUT_BCS_RELOC(batch, avc_bsd_surface->dmv_top,
                              I915_GEM_DOMAIN_INSTRUCTION, 0,
                              0);
        } else {
            /* empty frame store slot */
            OUT_BCS_BATCH(batch, 0);
            OUT_BCS_BATCH(batch, 0);
        }
    }

    /* current (render) picture: update reference flag and ensure its bo */
    va_pic = &pic_param->CurrPic;
    obj_surface = decode_state->render_object;
    if (pic_param->pic_fields.bits.reference_pic_flag)
        obj_surface->flags |= SURFACE_REFERENCED;
    else
        obj_surface->flags &= ~SURFACE_REFERENCED;
    i965_check_alloc_surface_bo(ctx, obj_surface, 0, VA_FOURCC_NV12, SUBSAMPLE_YUV420);

    /* initial uv component for YUV400 case */
    if (pic_param->seq_fields.bits.chroma_format_idc == 0) {
         unsigned int uv_offset = obj_surface->width * obj_surface->height;
         unsigned int uv_size   = obj_surface->width * obj_surface->height / 2;

         /* fill the NV12 UV plane with neutral chroma (0x80).
          * NOTE(review): dri_bo_map return value is not checked here. */
         dri_bo_map(obj_surface->bo, 1);
         memset(obj_surface->bo->virtual + uv_offset, 0x80, uv_size);
         dri_bo_unmap(obj_surface->bo);
    }

    i965_avc_bsd_init_avc_bsd_surface(ctx, obj_surface, pic_param, i965_h264_context);
    avc_bsd_surface = obj_surface->private_data;

    /* direct-MV write buffers for the current picture (GPU read+write) */
    OUT_BCS_RELOC(batch, avc_bsd_surface->dmv_top,
                  I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
                  0);

    if (avc_bsd_surface->dmv_bottom_flag == 1)
        OUT_BCS_RELOC(batch, avc_bsd_surface->dmv_bottom,
                      I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
                      0);
    else
        OUT_BCS_RELOC(batch, avc_bsd_surface->dmv_top,
                      I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
                      0);

    /* POC List */
    for (i = 0; i < ARRAY_ELEMS(i965_h264_context->fsid_list); i++) {
        obj_surface = i965_h264_context->fsid_list[i].obj_surface;

        if (obj_surface) {
            /* note: this inner va_pic intentionally shadows the outer one */
            const VAPictureH264 * const va_pic = avc_find_picture(
                obj_surface->base.id, pic_param->ReferenceFrames,
                ARRAY_ELEMS(pic_param->ReferenceFrames));

            assert(va_pic != NULL);
            OUT_BCS_BATCH(batch, va_pic->TopFieldOrderCnt);
            OUT_BCS_BATCH(batch, va_pic->BottomFieldOrderCnt);
        } else {
            OUT_BCS_BATCH(batch, 0);
            OUT_BCS_BATCH(batch, 0);
        }
    }

    /* POC of the current picture closes the list */
    va_pic = &pic_param->CurrPic;
    OUT_BCS_BATCH(batch, va_pic->TopFieldOrderCnt);
    OUT_BCS_BATCH(batch, va_pic->BottomFieldOrderCnt);

    ADVANCE_BCS_BATCH(batch);
}
  493.  
/*
 * Emit CMD_AVC_BSD_OBJECT for one slice on G4x hardware (8 DWs, or 9
 * when encrypted).  With slice_param == NULL a "phantom" slice is
 * emitted instead, which tells the BSD unit the picture is complete.
 */
static void
g4x_avc_bsd_object(VADriverContextP ctx,
                   struct decode_state *decode_state,
                   VAPictureParameterBufferH264 *pic_param,
                   VASliceParameterBufferH264 *slice_param,
                   int slice_index,
                   struct i965_h264_context *i965_h264_context)
{
    struct intel_batchbuffer *batch = i965_h264_context->batch;
    int width_in_mbs = pic_param->picture_width_in_mbs_minus1 + 1;
    int height_in_mbs = pic_param->picture_height_in_mbs_minus1 + 1; /* frame height */

    if (slice_param) {
        int encrypted, counter_value, cmd_len;
        int slice_hor_pos, slice_ver_pos;
        int num_ref_idx_l0, num_ref_idx_l1;
        int mbaff_picture = (!pic_param->pic_fields.bits.field_pic_flag &&
                             pic_param->seq_fields.bits.mb_adaptive_frame_field_flag);
        unsigned int slice_data_bit_offset;
        int weighted_pred_idc = 0;
        int first_mb_in_slice = 0;
        int slice_type;

        encrypted = 0; /* FIXME: which flag in VAAPI is used for encryption? */

        if (encrypted) {
            cmd_len = 9; /* extra DW for the counter value */
            counter_value = 0; /* FIXME: ??? */
        } else
            cmd_len = 8;


        /* bit offset of the first macroblock, past the slice header
         * (accounts for emulation-prevention bytes) */
        slice_data_bit_offset = avc_get_first_mb_bit_offset_with_epb(
            decode_state->slice_datas[slice_index]->bo,
            slice_param,
            pic_param->pic_fields.bits.entropy_coding_mode_flag
        );

        /* collapse SI->I and SP->P for the hardware slice type field */
        if (slice_param->slice_type == SLICE_TYPE_I ||
            slice_param->slice_type == SLICE_TYPE_SI)
            slice_type = SLICE_TYPE_I;
        else if (slice_param->slice_type == SLICE_TYPE_P ||
                 slice_param->slice_type == SLICE_TYPE_SP)
            slice_type = SLICE_TYPE_P;
        else {
            assert(slice_param->slice_type == SLICE_TYPE_B);
            slice_type = SLICE_TYPE_B;
        }

        /* active reference counts per list, by slice type */
        if (slice_type == SLICE_TYPE_I) {
            assert(slice_param->num_ref_idx_l0_active_minus1 == 0);
            assert(slice_param->num_ref_idx_l1_active_minus1 == 0);
            num_ref_idx_l0 = 0;
            num_ref_idx_l1 = 0;
        } else if (slice_type == SLICE_TYPE_P) {
            assert(slice_param->num_ref_idx_l1_active_minus1 == 0);
            num_ref_idx_l0 = slice_param->num_ref_idx_l0_active_minus1 + 1;
            num_ref_idx_l1 = 0;
        } else {
            num_ref_idx_l0 = slice_param->num_ref_idx_l0_active_minus1 + 1;
            num_ref_idx_l1 = slice_param->num_ref_idx_l1_active_minus1 + 1;
        }

        if (slice_type == SLICE_TYPE_P)
            weighted_pred_idc = pic_param->pic_fields.bits.weighted_pred_flag;
        else if (slice_type == SLICE_TYPE_B)
            weighted_pred_idc = pic_param->pic_fields.bits.weighted_bipred_idc;

        /* MBAFF addresses macroblock pairs, hence the shift */
        first_mb_in_slice = slice_param->first_mb_in_slice << mbaff_picture;
        slice_hor_pos = first_mb_in_slice % width_in_mbs;
        slice_ver_pos = first_mb_in_slice / width_in_mbs;

        BEGIN_BCS_BATCH(batch, cmd_len);
        OUT_BCS_BATCH(batch, CMD_AVC_BSD_OBJECT | (cmd_len - 2));
        /* DW1: remaining bitstream length after the slice header */
        OUT_BCS_BATCH(batch,
                      (encrypted << 31) |
                      ((slice_param->slice_data_size - (slice_data_bit_offset >> 3)) << 0));
        /* DW2: byte offset of the first MB within the indirect object */
        OUT_BCS_BATCH(batch,
                      (slice_param->slice_data_offset +
                       (slice_data_bit_offset >> 3)));
        OUT_BCS_BATCH(batch,
                      (0 << 31) | /* concealment mode: 0->intra 16x16 prediction, 1->inter P Copy */
                      (0 << 14) | /* ignore BSDPrematureComplete Error handling */
                      (0 << 13) | /* FIXME: ??? */
                      (0 << 12) | /* ignore MPR Error handling */
                      (0 << 10) | /* ignore Entropy Error handling */
                      (0 << 8)  | /* ignore MB Header Error handling */
                      (slice_type << 0));
        /* DW4: reference counts and weight denominators */
        OUT_BCS_BATCH(batch,
                      (num_ref_idx_l1 << 24) |
                      (num_ref_idx_l0 << 16) |
                      (slice_param->chroma_log2_weight_denom << 8) |
                      (slice_param->luma_log2_weight_denom << 0));
        /* DW5: prediction/deblocking controls and slice QP */
        OUT_BCS_BATCH(batch,
                      (weighted_pred_idc << 30) |
                      (slice_param->direct_spatial_mv_pred_flag << 29) |
                      (slice_param->disable_deblocking_filter_idc << 27) |
                      (slice_param->cabac_init_idc << 24) |
                      ((pic_param->pic_init_qp_minus26 + 26 + slice_param->slice_qp_delta) << 16) |
                      ((slice_param->slice_beta_offset_div2 & 0xf) << 8) |
                      ((slice_param->slice_alpha_c0_offset_div2 & 0xf) << 0));
        /* DW6: slice start position in macroblocks */
        OUT_BCS_BATCH(batch,
                      (slice_ver_pos << 24) |
                      (slice_hor_pos << 16) |
                      (first_mb_in_slice << 0));
        /* DW7: bit position of the first MB within its starting byte */
        OUT_BCS_BATCH(batch,
                      (1 << 7) |
                      ((0x7 - (slice_data_bit_offset & 0x7)) << 0));

        if (encrypted) {
            OUT_BCS_BATCH(batch, counter_value);
        }

        ADVANCE_BCS_BATCH(batch);
    } else {
        /* phantom slice: zero-length slice marking the end of the picture;
         * DW6 carries the total MB count (halved for field pictures) */
        BEGIN_BCS_BATCH(batch, 8);
        OUT_BCS_BATCH(batch, CMD_AVC_BSD_OBJECT | (8 - 2));
        OUT_BCS_BATCH(batch, 0); /* indirect data length for phantom slice is 0 */
        OUT_BCS_BATCH(batch, 0); /* indirect data start address for phantom slice is 0 */
        OUT_BCS_BATCH(batch, 0);
        OUT_BCS_BATCH(batch, 0);
        OUT_BCS_BATCH(batch, 0);
        OUT_BCS_BATCH(batch, width_in_mbs * height_in_mbs / (1 + !!pic_param->pic_fields.bits.field_pic_flag));
        OUT_BCS_BATCH(batch, 0);
        ADVANCE_BCS_BATCH(batch);
    }
}
  621.  
/*
 * Emit one AVC_BSD_OBJECT command (16 dwords) on the BCS ring for
 * Ironlake.  A non-NULL slice_param emits a real slice referencing the
 * indirect slice data BO; a NULL slice_param emits the "phantom" slice
 * that terminates decoding of the picture (zero-length indirect data,
 * first MB set past the end of the frame).
 */
static void
ironlake_avc_bsd_object(VADriverContextP ctx,
                        struct decode_state *decode_state,
                        VAPictureParameterBufferH264 *pic_param,
                        VASliceParameterBufferH264 *slice_param,
                        int slice_index,
                        struct i965_h264_context *i965_h264_context)
{
    struct intel_batchbuffer *batch = i965_h264_context->batch;
    int width_in_mbs = pic_param->picture_width_in_mbs_minus1 + 1;
    int height_in_mbs = pic_param->picture_height_in_mbs_minus1 + 1; /* frame height */

    if (slice_param) {
        int encrypted, counter_value;
        int slice_hor_pos, slice_ver_pos;
        int num_ref_idx_l0, num_ref_idx_l1;
        /* MBAFF frame: each slice row covers two MB rows, so
         * first_mb_in_slice must be doubled below. */
        int mbaff_picture = (!pic_param->pic_fields.bits.field_pic_flag &&
                             pic_param->seq_fields.bits.mb_adaptive_frame_field_flag);
        unsigned int slice_data_bit_offset;
        int weighted_pred_idc = 0;
        int first_mb_in_slice;
        int slice_type;

        encrypted = 0; /* FIXME: which flag in VAAPI is used for encryption? */

        if (encrypted) {
            counter_value = 0; /* FIXME: ??? */
        } else
            counter_value = 0;

        /* Bit offset of the first MB in the slice data, counted in the
         * raw bitstream including emulation-prevention bytes (EPB). */
        slice_data_bit_offset = avc_get_first_mb_bit_offset_with_epb(
            decode_state->slice_datas[slice_index]->bo,
            slice_param,
            pic_param->pic_fields.bits.entropy_coding_mode_flag
        );

        /* Collapse SI->I and SP->P: the hardware only distinguishes I/P/B. */
        if (slice_param->slice_type == SLICE_TYPE_I ||
            slice_param->slice_type == SLICE_TYPE_SI)
            slice_type = SLICE_TYPE_I;
        else if (slice_param->slice_type == SLICE_TYPE_P ||
                 slice_param->slice_type == SLICE_TYPE_SP)
            slice_type = SLICE_TYPE_P;
        else {
            assert(slice_param->slice_type == SLICE_TYPE_B);
            slice_type = SLICE_TYPE_B;
        }

        /* Active reference counts per list; unused lists are zero. */
        if (slice_type == SLICE_TYPE_I) {
            assert(slice_param->num_ref_idx_l0_active_minus1 == 0);
            assert(slice_param->num_ref_idx_l1_active_minus1 == 0);
            num_ref_idx_l0 = 0;
            num_ref_idx_l1 = 0;
        } else if (slice_type == SLICE_TYPE_P) {
            assert(slice_param->num_ref_idx_l1_active_minus1 == 0);
            num_ref_idx_l0 = slice_param->num_ref_idx_l0_active_minus1 + 1;
            num_ref_idx_l1 = 0;
        } else {
            num_ref_idx_l0 = slice_param->num_ref_idx_l0_active_minus1 + 1;
            num_ref_idx_l1 = slice_param->num_ref_idx_l1_active_minus1 + 1;
        }

        /* P slices use the explicit weighted-pred flag, B slices the
         * 2-bit weighted_bipred_idc; I slices have no weighting. */
        if (slice_type == SLICE_TYPE_P)
            weighted_pred_idc = pic_param->pic_fields.bits.weighted_pred_flag;
        else if (slice_type == SLICE_TYPE_B)
            weighted_pred_idc = pic_param->pic_fields.bits.weighted_bipred_idc;

        first_mb_in_slice = slice_param->first_mb_in_slice << mbaff_picture;
        slice_hor_pos = first_mb_in_slice % width_in_mbs;
        slice_ver_pos = first_mb_in_slice / width_in_mbs;

        BEGIN_BCS_BATCH(batch, 16);
        OUT_BCS_BATCH(batch, CMD_AVC_BSD_OBJECT | (16 - 2));
        /* DW1: indirect data length = slice bytes remaining after the
         * slice header (whole bytes only; leftover bits go in DW7). */
        OUT_BCS_BATCH(batch,
                      (encrypted << 31) |
                      (0 << 30) | /* FIXME: packet based bit stream */
                      (0 << 29) | /* FIXME: packet format */
                      ((slice_param->slice_data_size - (slice_data_bit_offset >> 3)) << 0));
        /* DW2: indirect data start address (byte of first MB) */
        OUT_BCS_BATCH(batch,
                      (slice_param->slice_data_offset +
                       (slice_data_bit_offset >> 3)));
        /* DW3: error handling policy + slice type */
        OUT_BCS_BATCH(batch,
                      (0 << 31) | /* concealment mode: 0->intra 16x16 prediction, 1->inter P Copy */
                      (0 << 14) | /* ignore BSDPrematureComplete Error handling */
                      (0 << 13) | /* FIXME: ??? */
                      (0 << 12) | /* ignore MPR Error handling */
                      (0 << 10) | /* ignore Entropy Error handling */
                      (0 << 8)  | /* ignore MB Header Error handling */
                      (slice_type << 0));
        /* DW4: reference counts and weight denominators */
        OUT_BCS_BATCH(batch,
                      (num_ref_idx_l1 << 24) |
                      (num_ref_idx_l0 << 16) |
                      (slice_param->chroma_log2_weight_denom << 8) |
                      (slice_param->luma_log2_weight_denom << 0));
        /* DW5: prediction/deblocking controls and slice QP */
        OUT_BCS_BATCH(batch,
                      (weighted_pred_idc << 30) |
                      (slice_param->direct_spatial_mv_pred_flag << 29) |
                      (slice_param->disable_deblocking_filter_idc << 27) |
                      (slice_param->cabac_init_idc << 24) |
                      ((pic_param->pic_init_qp_minus26 + 26 + slice_param->slice_qp_delta) << 16) |
                      ((slice_param->slice_beta_offset_div2 & 0xf) << 8) |
                      ((slice_param->slice_alpha_c0_offset_div2 & 0xf) << 0));
        /* DW6: slice start position in MB units */
        OUT_BCS_BATCH(batch,
                      (slice_ver_pos << 24) |
                      (slice_hor_pos << 16) |
                      (first_mb_in_slice << 0));
        /* DW7: bit 7 set + number of unused bits in the first byte of
         * the indirect data (7 - bit offset within that byte). */
        OUT_BCS_BATCH(batch,
                      (1 << 7) |
                      ((0x7 - (slice_data_bit_offset & 0x7)) << 0));
        OUT_BCS_BATCH(batch, counter_value);
       
        /* FIXME: dw9-dw11 */
        OUT_BCS_BATCH(batch, 0);
        OUT_BCS_BATCH(batch, 0);
        OUT_BCS_BATCH(batch, 0);
        /* DW12-DW15: per-list flags for the "all weights == 128" fast path */
        OUT_BCS_BATCH(batch, i965_h264_context->weight128_luma_l0);
        OUT_BCS_BATCH(batch, i965_h264_context->weight128_luma_l1);
        OUT_BCS_BATCH(batch, i965_h264_context->weight128_chroma_l0);
        OUT_BCS_BATCH(batch, i965_h264_context->weight128_chroma_l1);

        ADVANCE_BCS_BATCH(batch);
    } else {
        /* Phantom slice: zero-length indirect data, first MB equal to
         * the total MB count of the picture (halved for field pictures),
         * which tells the BSD unit the picture is complete. */
        BEGIN_BCS_BATCH(batch, 16);
        OUT_BCS_BATCH(batch, CMD_AVC_BSD_OBJECT | (16 - 2));
        OUT_BCS_BATCH(batch, 0); /* indirect data length for phantom slice is 0 */
        OUT_BCS_BATCH(batch, 0); /* indirect data start address for phantom slice is 0 */
        OUT_BCS_BATCH(batch, 0);
        OUT_BCS_BATCH(batch, 0);
        OUT_BCS_BATCH(batch, 0);
        OUT_BCS_BATCH(batch, width_in_mbs * height_in_mbs / (1 + !!pic_param->pic_fields.bits.field_pic_flag));
        OUT_BCS_BATCH(batch, 0);
        OUT_BCS_BATCH(batch, 0);
        OUT_BCS_BATCH(batch, 0);
        OUT_BCS_BATCH(batch, 0);
        OUT_BCS_BATCH(batch, 0);
        OUT_BCS_BATCH(batch, 0);
        OUT_BCS_BATCH(batch, 0);
        OUT_BCS_BATCH(batch, 0);
        OUT_BCS_BATCH(batch, 0);
        ADVANCE_BCS_BATCH(batch);
    }
}
  763.  
  764. static void
  765. i965_avc_bsd_object(VADriverContextP ctx,
  766.                     struct decode_state *decode_state,
  767.                     VAPictureParameterBufferH264 *pic_param,
  768.                     VASliceParameterBufferH264 *slice_param,
  769.                     int slice_index,
  770.                     struct i965_h264_context *i965_h264_context)
  771. {
  772.     struct i965_driver_data *i965 = i965_driver_data(ctx);
  773.  
  774.     if (IS_IRONLAKE(i965->intel.device_info))
  775.         ironlake_avc_bsd_object(ctx, decode_state, pic_param, slice_param, slice_index, i965_h264_context);
  776.     else
  777.         g4x_avc_bsd_object(ctx, decode_state, pic_param, slice_param, slice_index, i965_h264_context);
  778. }
  779.  
/*
 * Emit the phantom (terminating) slice for the current picture by
 * calling i965_avc_bsd_object() with a NULL slice parameter; the
 * slice index is unused in that case.
 */
static void
i965_avc_bsd_phantom_slice(VADriverContextP ctx,
                           struct decode_state *decode_state,
                           VAPictureParameterBufferH264 *pic_param,
                           struct i965_h264_context *i965_h264_context)
{
    i965_avc_bsd_object(ctx, decode_state, pic_param, NULL, 0, i965_h264_context);
}
  788.  
  789. void
  790. i965_avc_bsd_pipeline(VADriverContextP ctx, struct decode_state *decode_state, void *h264_context)
  791. {
  792.     struct i965_h264_context *i965_h264_context = (struct i965_h264_context *)h264_context;
  793.     struct intel_batchbuffer *batch = i965_h264_context->batch;
  794.     VAPictureParameterBufferH264 *pic_param;
  795.     VASliceParameterBufferH264 *slice_param;
  796.     int i, j;
  797.  
  798.     assert(decode_state->pic_param && decode_state->pic_param->buffer);
  799.     pic_param = (VAPictureParameterBufferH264 *)decode_state->pic_param->buffer;
  800.     intel_update_avc_frame_store_index(ctx, decode_state, pic_param,
  801.         i965_h264_context->fsid_list, &i965_h264_context->fs_ctx);
  802.  
  803.     i965_h264_context->enable_avc_ildb = 0;
  804.     i965_h264_context->picture.i_flag = 1;
  805.  
  806.     for (j = 0; j < decode_state->num_slice_params && i965_h264_context->enable_avc_ildb == 0; j++) {
  807.         assert(decode_state->slice_params && decode_state->slice_params[j]->buffer);
  808.         slice_param = (VASliceParameterBufferH264 *)decode_state->slice_params[j]->buffer;
  809.  
  810.         for (i = 0; i < decode_state->slice_params[j]->num_elements; i++) {
  811.             assert(slice_param->slice_data_flag == VA_SLICE_DATA_FLAG_ALL);
  812.             assert((slice_param->slice_type == SLICE_TYPE_I) ||
  813.                    (slice_param->slice_type == SLICE_TYPE_SI) ||
  814.                    (slice_param->slice_type == SLICE_TYPE_P) ||
  815.                    (slice_param->slice_type == SLICE_TYPE_SP) ||
  816.                    (slice_param->slice_type == SLICE_TYPE_B));
  817.  
  818.             if (slice_param->disable_deblocking_filter_idc != 1) {
  819.                 i965_h264_context->enable_avc_ildb = 1;
  820.                 break;
  821.             }
  822.  
  823.             slice_param++;
  824.         }
  825.     }
  826.  
  827.     intel_batchbuffer_start_atomic_bcs(batch, 0x1000);
  828.  
  829.     i965_avc_bsd_img_state(ctx, decode_state, i965_h264_context);
  830.     i965_avc_bsd_qm_state(ctx, decode_state, i965_h264_context);
  831.  
  832.     for (j = 0; j < decode_state->num_slice_params; j++) {
  833.         assert(decode_state->slice_params && decode_state->slice_params[j]->buffer);
  834.         slice_param = (VASliceParameterBufferH264 *)decode_state->slice_params[j]->buffer;
  835.  
  836.         i965_bsd_ind_obj_base_address(ctx, decode_state, j, i965_h264_context);
  837.  
  838.         for (i = 0; i < decode_state->slice_params[j]->num_elements; i++) {
  839.             assert(slice_param->slice_data_flag == VA_SLICE_DATA_FLAG_ALL);
  840.             assert((slice_param->slice_type == SLICE_TYPE_I) ||
  841.                    (slice_param->slice_type == SLICE_TYPE_SI) ||
  842.                    (slice_param->slice_type == SLICE_TYPE_P) ||
  843.                    (slice_param->slice_type == SLICE_TYPE_SP) ||
  844.                    (slice_param->slice_type == SLICE_TYPE_B));
  845.  
  846.             if (i965_h264_context->picture.i_flag &&
  847.                 (slice_param->slice_type != SLICE_TYPE_I ||
  848.                  slice_param->slice_type != SLICE_TYPE_SI))
  849.                 i965_h264_context->picture.i_flag = 0;
  850.  
  851.             i965_avc_bsd_slice_state(ctx, pic_param, slice_param, i965_h264_context);
  852.             i965_avc_bsd_buf_base_state(ctx, decode_state, pic_param, slice_param, i965_h264_context);
  853.             i965_avc_bsd_object(ctx, decode_state, pic_param, slice_param, j, i965_h264_context);
  854.             slice_param++;
  855.         }
  856.     }
  857.  
  858.     i965_avc_bsd_phantom_slice(ctx, decode_state, pic_param, i965_h264_context);
  859.     intel_batchbuffer_emit_mi_flush(batch);
  860.     intel_batchbuffer_end_atomic(batch);
  861.     intel_batchbuffer_flush(batch);
  862. }
  863.  
  864. void
  865. i965_avc_bsd_decode_init(VADriverContextP ctx, void *h264_context)
  866. {
  867.     struct i965_driver_data *i965 = i965_driver_data(ctx);
  868.     struct i965_h264_context *i965_h264_context = (struct i965_h264_context *)h264_context;
  869.     struct i965_avc_bsd_context *i965_avc_bsd_context;
  870.     dri_bo *bo;
  871.  
  872.     assert(i965_h264_context);
  873.     i965_avc_bsd_context = &i965_h264_context->i965_avc_bsd_context;
  874.  
  875.     dri_bo_unreference(i965_avc_bsd_context->bsd_raw_store.bo);
  876.     bo = dri_bo_alloc(i965->intel.bufmgr,
  877.                       "bsd raw store",
  878.                       0x3000, /* at least 11520 bytes to support 120 MBs per row */
  879.                       64);
  880.     assert(bo);
  881.     i965_avc_bsd_context->bsd_raw_store.bo = bo;
  882.  
  883.     dri_bo_unreference(i965_avc_bsd_context->mpr_row_store.bo);
  884.     bo = dri_bo_alloc(i965->intel.bufmgr,
  885.                       "mpr row store",
  886.                       0x2000, /* at least 7680 bytes to support 120 MBs per row */
  887.                       64);
  888.     assert(bo);
  889.     i965_avc_bsd_context->mpr_row_store.bo = bo;
  890. }
  891.  
/*
 * Release the scratch buffer objects owned by the AVC BSD context.
 * Always returns True.
 *
 * NOTE(review): "ternimate" is a long-standing misspelling of
 * "terminate"; the name is part of the public interface, so renaming
 * it would break external callers.
 */
Bool
i965_avc_bsd_ternimate(struct i965_avc_bsd_context *i965_avc_bsd_context)
{
    dri_bo_unreference(i965_avc_bsd_context->bsd_raw_store.bo);
    dri_bo_unreference(i965_avc_bsd_context->mpr_row_store.bo);

    return True;
}
  900.