Subversion Repositories Kolibri OS

Rev

Blame | Last modification | View Log | Download | RSS feed

  1. /*
  2.  * Copyright (C) 2006-2012 Intel Corporation
  3.  *
  4.  * Permission is hereby granted, free of charge, to any person obtaining a
  5.  * copy of this software and associated documentation files (the "Software"),
  6.  * to deal in the Software without restriction, including without limitation
  7.  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
  8.  * and/or sell copies of the Software, and to permit persons to whom the
  9.  * Software is furnished to do so, subject to the following conditions:
  10.  *
  11.  * The above copyright notice and this permission notice (including the next
  12.  * paragraph) shall be included in all copies or substantial portions of the
  13.  * Software.
  14.  *
  15.  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
  16.  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
  17.  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
  18.  * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
  19.  * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
  20.  * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
  21.  * DEALINGS IN THE SOFTWARE.
  22.  */
  23.  
#include "sysdeps.h"

#include <alloca.h>
#include <stdlib.h>

#include "intel_batchbuffer.h"
#include "i965_decoder_utils.h"
#include "i965_drv_video.h"
#include "i965_defines.h"
  30.  
  31. /* Set reference surface if backing store exists */
  32. static inline int
  33. set_ref_frame(
  34.     struct i965_driver_data *i965,
  35.     GenFrameStore           *ref_frame,
  36.     VASurfaceID              va_surface
  37. )
  38. {
  39.     struct object_surface *obj_surface;
  40.  
  41.     if (va_surface == VA_INVALID_ID)
  42.         return 0;
  43.  
  44.     obj_surface = SURFACE(va_surface);
  45.     if (!obj_surface || !obj_surface->bo)
  46.         return 0;
  47.  
  48.     ref_frame->surface_id = va_surface;
  49.     return 1;
  50. }
  51.  
/* Check whether the codec layer incorrectly fills in slice_vertical_position.
 *
 * Returns 1 if the positions look wrong and the workaround must be applied,
 * 0 if they look sane (or the sequence appears progressive), and -1 when no
 * decision can be made yet (frame-coded picture: wait for a field picture).
 */
int
mpeg2_wa_slice_vertical_position(
    struct decode_state           *decode_state,
    VAPictureParameterBufferMPEG2 *pic_param
)
{
    unsigned int i, j, mb_height, vpos, last_vpos = 0;

    /* Assume progressive sequence if we got a progressive frame */
    if (pic_param->picture_coding_extension.bits.progressive_frame)
        return 0;

    /* Wait for a field coded picture */
    if (pic_param->picture_coding_extension.bits.picture_structure == MPEG_FRAME)
        return -1;

    assert(decode_state && decode_state->slice_params);

    /* Macroblock rows in one field: ceil((vertical_size / 2) / 16) */
    mb_height = (pic_param->vertical_size + 31) / 32;

    for (j = 0; j < decode_state->num_slice_params; j++) {
        struct buffer_store * const buffer_store =
            decode_state->slice_params[j];

        for (i = 0; i < buffer_store->num_elements; i++) {
            VASliceParameterBufferMPEG2 * const slice_param =
                ((VASliceParameterBufferMPEG2 *)buffer_store->buffer) + i;

            vpos = slice_param->slice_vertical_position;
            /* A row at or beyond the field height, or a jump of exactly two
             * rows between consecutive slices, presumably indicates
             * frame-relative positions passed for a field picture —
             * NOTE(review): heuristic inherited from upstream; confirm. */
            if (vpos >= mb_height || vpos == last_vpos + 2) {
                WARN_ONCE("codec layer incorrectly fills in MPEG-2 slice_vertical_position. Workaround applied\n");
                return 1;
            }
            last_vpos = vpos;
        }
    }
    return 0;
}
  91.  
/* Build MPEG-2 reference frames array.
 *
 * Fills ref_frames[0..3] for the hardware: slots 0/1 hold the first-field
 * references, slots 2/3 the second-field references (only filled for
 * non-progressive frames).  Slots that cannot be resolved are duplicated
 * from the first valid entry of their pair, or set to VA_INVALID_ID.
 */
void
mpeg2_set_reference_surfaces(
    VADriverContextP               ctx,
    GenFrameStore                  ref_frames[MAX_GEN_REFERENCE_FRAMES],
    struct decode_state           *decode_state,
    VAPictureParameterBufferMPEG2 *pic_param
)
{
    struct i965_driver_data * const i965 = i965_driver_data(ctx);
    VASurfaceID va_surface;
    unsigned pic_structure, is_second_field, n = 0;

    pic_structure = pic_param->picture_coding_extension.bits.picture_structure;
    /* Field picture that is not the first field of its frame */
    is_second_field = pic_structure != MPEG_FRAME &&
        !pic_param->picture_coding_extension.bits.is_first_field;

    /* Default in case no reference can be resolved below */
    ref_frames[0].surface_id = VA_INVALID_ID;

    /* Reference frames are indexed by frame store ID (0:top, 1:bottom) */
    switch (pic_param->picture_coding_type) {
    case MPEG_P_PICTURE:
        /* Second (bottom) field of a P picture may reference the top field
         * already decoded into the current render target */
        if (is_second_field && pic_structure == MPEG_BOTTOM_FIELD) {
            va_surface = decode_state->current_render_target;
            n += set_ref_frame(i965, &ref_frames[n], va_surface);
        }
        va_surface = pic_param->forward_reference_picture;
        n += set_ref_frame(i965, &ref_frames[n], va_surface);
        break;

    case MPEG_B_PICTURE:
        va_surface = pic_param->forward_reference_picture;
        n += set_ref_frame(i965, &ref_frames[n], va_surface);
        va_surface = pic_param->backward_reference_picture;
        n += set_ref_frame(i965, &ref_frames[n], va_surface);
        break;
    }

    /* Pad slots 0..1 by replicating the first entry */
    while (n != 2)
        ref_frames[n++].surface_id = ref_frames[0].surface_id;

    /* Progressive frames need no second-field references */
    if (pic_param->picture_coding_extension.bits.progressive_frame)
        return;

    ref_frames[2].surface_id = VA_INVALID_ID;

    /* Bottom field pictures used as reference */
    switch (pic_param->picture_coding_type) {
    case MPEG_P_PICTURE:
        /* Second (top) field may reference the bottom field already decoded
         * into the current render target */
        if (is_second_field && pic_structure == MPEG_TOP_FIELD) {
            va_surface = decode_state->current_render_target;
            n += set_ref_frame(i965, &ref_frames[n], va_surface);
        }
        va_surface = pic_param->forward_reference_picture;
        n += set_ref_frame(i965, &ref_frames[n], va_surface);
        break;

    case MPEG_B_PICTURE:
        va_surface = pic_param->forward_reference_picture;
        n += set_ref_frame(i965, &ref_frames[n], va_surface);
        va_surface = pic_param->backward_reference_picture;
        n += set_ref_frame(i965, &ref_frames[n], va_surface);
        break;
    }

    /* Pad slots 2..3 by replicating the first second-field entry */
    while (n != 4)
        ref_frames[n++].surface_id = ref_frames[2].surface_id;
}
  160.  
  161. /* Generate flat scaling matrices for H.264 decoding */
  162. void
  163. avc_gen_default_iq_matrix(VAIQMatrixBufferH264 *iq_matrix)
  164. {
  165.     /* Flat_4x4_16 */
  166.     memset(&iq_matrix->ScalingList4x4, 16, sizeof(iq_matrix->ScalingList4x4));
  167.  
  168.     /* Flat_8x8_16 */
  169.     memset(&iq_matrix->ScalingList8x8, 16, sizeof(iq_matrix->ScalingList8x8));
  170. }
  171.  
  172. /* Get first macroblock bit offset for BSD, minus EPB count (AVC) */
  173. /* XXX: slice_data_bit_offset does not account for EPB */
  174. unsigned int
  175. avc_get_first_mb_bit_offset(
  176.     dri_bo                     *slice_data_bo,
  177.     VASliceParameterBufferH264 *slice_param,
  178.     unsigned int                mode_flag
  179. )
  180. {
  181.     unsigned int slice_data_bit_offset = slice_param->slice_data_bit_offset;
  182.  
  183.     if (mode_flag == ENTROPY_CABAC)
  184.         slice_data_bit_offset = ALIGN(slice_data_bit_offset, 0x8);
  185.     return slice_data_bit_offset;
  186. }
  187.  
  188. /* Get first macroblock bit offset for BSD, with EPB count (AVC) */
  189. /* XXX: slice_data_bit_offset does not account for EPB */
  190. unsigned int
  191. avc_get_first_mb_bit_offset_with_epb(
  192.     dri_bo                     *slice_data_bo,
  193.     VASliceParameterBufferH264 *slice_param,
  194.     unsigned int                mode_flag
  195. )
  196. {
  197.     unsigned int in_slice_data_bit_offset = slice_param->slice_data_bit_offset;
  198.     unsigned int out_slice_data_bit_offset;
  199.     unsigned int i, j, n, buf_size, data_size, header_size;
  200.     uint8_t *buf;
  201.     int ret;
  202.  
  203.     header_size = slice_param->slice_data_bit_offset / 8;
  204.     data_size   = slice_param->slice_data_size - slice_param->slice_data_offset;
  205.     buf_size    = (header_size * 3 + 1) / 2; // Max possible header size (x1.5)
  206.     if (buf_size > data_size)
  207.         buf_size = data_size;
  208.  
  209.     buf = alloca(buf_size);
  210.     ret = dri_bo_get_subdata(
  211.         slice_data_bo, slice_param->slice_data_offset,
  212.         buf_size, buf
  213.     );
  214.     assert(ret == 0);
  215.  
  216.     for (i = 2, j = 2, n = 0; i < buf_size && j < header_size; i++, j++) {
  217.         if (buf[i] == 0x03 && buf[i - 1] == 0x00 && buf[i - 2] == 0x00)
  218.             i += 2, j++, n++;
  219.     }
  220.     out_slice_data_bit_offset = in_slice_data_bit_offset + n * 8;
  221.  
  222.     if (mode_flag == ENTROPY_CABAC)
  223.         out_slice_data_bit_offset = ALIGN(out_slice_data_bit_offset, 0x8);
  224.     return out_slice_data_bit_offset;
  225. }
  226.  
  227. static inline uint8_t
  228. get_ref_idx_state_1(const VAPictureH264 *va_pic, unsigned int frame_store_id)
  229. {
  230.     const unsigned int is_long_term =
  231.         !!(va_pic->flags & VA_PICTURE_H264_LONG_TERM_REFERENCE);
  232.     const unsigned int is_top_field =
  233.         !!(va_pic->flags & VA_PICTURE_H264_TOP_FIELD);
  234.     const unsigned int is_bottom_field =
  235.         !!(va_pic->flags & VA_PICTURE_H264_BOTTOM_FIELD);
  236.  
  237.     return ((is_long_term                         << 6) |
  238.             ((is_top_field ^ is_bottom_field ^ 1) << 5) |
  239.             (frame_store_id                       << 1) |
  240.             ((is_top_field ^ 1) & is_bottom_field));
  241. }
  242.  
  243. /* Fill in Reference List Entries (Gen5+: ILK, SNB, IVB) */
  244. void
  245. gen5_fill_avc_ref_idx_state(
  246.     uint8_t             state[32],
  247.     const VAPictureH264 ref_list[32],
  248.     unsigned int        ref_list_count,
  249.     const GenFrameStore frame_store[MAX_GEN_REFERENCE_FRAMES]
  250. )
  251. {
  252.     unsigned int i, n, frame_idx;
  253.  
  254.     for (i = 0, n = 0; i < ref_list_count; i++) {
  255.         const VAPictureH264 * const va_pic = &ref_list[i];
  256.  
  257.         if (va_pic->flags & VA_PICTURE_H264_INVALID)
  258.             continue;
  259.  
  260.         for (frame_idx = 0; frame_idx < MAX_GEN_REFERENCE_FRAMES; frame_idx++) {
  261.             const GenFrameStore * const fs = &frame_store[frame_idx];
  262.             if (fs->surface_id != VA_INVALID_ID &&
  263.                 fs->surface_id == va_pic->picture_id) {
  264.                 assert(frame_idx == fs->frame_store_id);
  265.                 break;
  266.             }
  267.         }
  268.         assert(frame_idx < MAX_GEN_REFERENCE_FRAMES);
  269.         state[n++] = get_ref_idx_state_1(va_pic, frame_idx);
  270.     }
  271.  
  272.     for (; n < 32; n++)
  273.         state[n] = 0xff;
  274. }
  275.  
/* Emit Reference List Entries (Gen6+: SNB, IVB).
 *
 * Emits one MFX_AVC_REF_IDX_STATE command (10 dwords: header, list id,
 * then 32 packed state bytes) for the given reference list.
 */
static void
gen6_send_avc_ref_idx_state_1(
    struct intel_batchbuffer         *batch,
    unsigned int                      list,
    const VAPictureH264              *ref_list,
    unsigned int                      ref_list_count,
    const GenFrameStore               frame_store[MAX_GEN_REFERENCE_FRAMES]
)
{
    uint8_t ref_idx_state[32];

    BEGIN_BCS_BATCH(batch, 10);
    /* Dword count field encodes total length minus 2, per command format */
    OUT_BCS_BATCH(batch, MFX_AVC_REF_IDX_STATE | (10 - 2));
    OUT_BCS_BATCH(batch, list);
    gen5_fill_avc_ref_idx_state(
        ref_idx_state,
        ref_list, ref_list_count,
        frame_store
    );
    /* 32 bytes = remaining 8 dwords of the command */
    intel_batchbuffer_data(batch, ref_idx_state, sizeof(ref_idx_state));
    ADVANCE_BCS_BATCH(batch);
}
  299.  
  300. void
  301. gen6_send_avc_ref_idx_state(
  302.     struct intel_batchbuffer         *batch,
  303.     const VASliceParameterBufferH264 *slice_param,
  304.     const GenFrameStore               frame_store[MAX_GEN_REFERENCE_FRAMES]
  305. )
  306. {
  307.     if (slice_param->slice_type == SLICE_TYPE_I ||
  308.         slice_param->slice_type == SLICE_TYPE_SI)
  309.         return;
  310.  
  311.     /* RefPicList0 */
  312.     gen6_send_avc_ref_idx_state_1(
  313.         batch, 0,
  314.         slice_param->RefPicList0, slice_param->num_ref_idx_l0_active_minus1 + 1,
  315.         frame_store
  316.     );
  317.  
  318.     if (slice_param->slice_type != SLICE_TYPE_B)
  319.         return;
  320.  
  321.     /* RefPicList1 */
  322.     gen6_send_avc_ref_idx_state_1(
  323.         batch, 1,
  324.         slice_param->RefPicList1, slice_param->num_ref_idx_l1_active_minus1 + 1,
  325.         frame_store
  326.     );
  327. }
  328.