Subversion Repositories Kolibri OS

Rev

Details | Last modification | View Log | RSS feed

Rev Author Line No. Line
3769 Serge 1
/*
2
 * Copyright © 2010 Intel Corporation
3
 *
4
 * Permission is hereby granted, free of charge, to any person obtaining a
5
 * copy of this software and associated documentation files (the
6
 * "Software"), to deal in the Software without restriction, including
7
 * without limitation the rights to use, copy, modify, merge, publish,
8
 * distribute, sub license, and/or sell copies of the Software, and to
9
 * permit persons to whom the Software is furnished to do so, subject to
10
 * the following conditions:
11
 *
12
 * The above copyright notice and this permission notice (including the
13
 * next paragraph) shall be included in all copies or substantial portions
14
 * of the Software.
15
 *
16
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
17
 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
18
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
19
 * IN NO EVENT SHALL PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR
20
 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
21
 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
22
 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
23
 *
24
 * Authors:
25
 *    Xiang Haihao <haihao.xiang@intel.com>
26
 *
27
 */
28
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <assert.h>
32
 
33
#ifndef HAVE_GEN_AVC_SURFACE
34
#define HAVE_GEN_AVC_SURFACE 1
35
#endif
36
 
37
#include "intel_batchbuffer.h"
38
#include "intel_driver.h"
39
 
40
#include "i965_defines.h"
41
#include "i965_drv_video.h"
42
#include "i965_avc_bsd.h"
43
#include "i965_media_h264.h"
44
#include "i965_media.h"
45
#include "i965_decoder_utils.h"
46
 
47
static void
48
i965_avc_bsd_init_avc_bsd_surface(VADriverContextP ctx,
49
                                  struct object_surface *obj_surface,
50
                                  VAPictureParameterBufferH264 *pic_param,
51
                                  struct i965_h264_context *i965_h264_context)
52
{
53
    struct i965_driver_data *i965 = i965_driver_data(ctx);
54
    GenAvcSurface *avc_bsd_surface = obj_surface->private_data;
55
 
56
    obj_surface->free_private_data = gen_free_avc_surface;
57
 
58
    if (!avc_bsd_surface) {
59
        avc_bsd_surface = calloc(sizeof(GenAvcSurface), 1);
60
        assert((obj_surface->size & 0x3f) == 0);
61
        obj_surface->private_data = avc_bsd_surface;
62
    }
63
 
64
    avc_bsd_surface->dmv_bottom_flag = (pic_param->pic_fields.bits.field_pic_flag &&
65
                                        !pic_param->seq_fields.bits.direct_8x8_inference_flag);
66
 
67
    if (avc_bsd_surface->dmv_top == NULL) {
68
        avc_bsd_surface->dmv_top = dri_bo_alloc(i965->intel.bufmgr,
69
                                                "direct mv w/r buffer",
70
                                                DMV_SIZE,
71
                                                0x1000);
72
    }
73
 
74
    if (avc_bsd_surface->dmv_bottom_flag &&
75
        avc_bsd_surface->dmv_bottom == NULL) {
76
        avc_bsd_surface->dmv_bottom = dri_bo_alloc(i965->intel.bufmgr,
77
                                                   "direct mv w/r buffer",
78
                                                   DMV_SIZE,
79
                                                   0x1000);
80
    }
81
}
82
 
83
/*
 * Emit CMD_BSD_IND_OBJ_BASE_ADDR so the BSD unit fetches the bitstream for
 * this slice indirectly from the uploaded slice-data buffer object.
 * The dwords must be emitted in exactly this order; the trailing zero dword
 * is the upper bound (unused here).
 */
static void
i965_bsd_ind_obj_base_address(VADriverContextP ctx,
                              struct decode_state *decode_state,
                              int slice,
                              struct i965_h264_context *i965_h264_context)

{
    struct intel_batchbuffer *batch = i965_h264_context->batch;

    /* Bitstream data uploaded for this slice index. */
    dri_bo *ind_bo = decode_state->slice_datas[slice]->bo;

    BEGIN_BCS_BATCH(batch, 3);
    OUT_BCS_BATCH(batch, CMD_BSD_IND_OBJ_BASE_ADDR | (3 - 2)); /* dword length = total - 2 */
    OUT_BCS_RELOC(batch, ind_bo,
                  I915_GEM_DOMAIN_INSTRUCTION, 0,
                  0);
    OUT_BCS_BATCH(batch, 0);
    ADVANCE_BCS_BATCH(batch);
}
102
 
103
/*
 * Emit CMD_AVC_BSD_IMG_STATE: per-picture configuration for the BSD unit.
 * Validates (via asserts) the H.264 picture-parameter invariants the
 * hardware relies on, then packs the 6-dword command. The bit layout of
 * each dword is fixed by the hardware; do not reorder the OUT_BCS_* calls.
 */
static void
i965_avc_bsd_img_state(VADriverContextP ctx,
                       struct decode_state *decode_state,
                       struct i965_h264_context *i965_h264_context)
{
    struct intel_batchbuffer *batch = i965_h264_context->batch;
    int qm_present_flag;            /* 1 if the app supplied IQ matrices */
    int img_struct;                 /* 0 = frame, 1 = top field, 3 = bottom field */
    int mbaff_frame_flag;
    unsigned int avc_it_command_header;
    unsigned int width_in_mbs, height_in_mbs;
    VAPictureParameterBufferH264 *pic_param;

    if (decode_state->iq_matrix && decode_state->iq_matrix->buffer)
        qm_present_flag = 1;
    else
        qm_present_flag = 0; /* built-in QM matrices */

    assert(decode_state->pic_param && decode_state->pic_param->buffer);
    pic_param = (VAPictureParameterBufferH264 *)decode_state->pic_param->buffer;

    assert(!(pic_param->CurrPic.flags & VA_PICTURE_H264_INVALID));

    if (pic_param->CurrPic.flags & VA_PICTURE_H264_TOP_FIELD)
        img_struct = 1;
    else if (pic_param->CurrPic.flags & VA_PICTURE_H264_BOTTOM_FIELD)
        img_struct = 3;
    else
        img_struct = 0;

    /* A field img_struct implies field_pic_flag and vice versa. */
    if ((img_struct & 0x1) == 0x1) {
        assert(pic_param->pic_fields.bits.field_pic_flag == 0x1);
    } else {
        assert(pic_param->pic_fields.bits.field_pic_flag == 0x0);
    }

    if (pic_param->seq_fields.bits.frame_mbs_only_flag) { /* a frame containing only frame macroblocks */
        assert(pic_param->seq_fields.bits.mb_adaptive_frame_field_flag == 0);
        assert(pic_param->pic_fields.bits.field_pic_flag == 0);
    } else {
        assert(pic_param->seq_fields.bits.direct_8x8_inference_flag == 1); /* see H.264 spec */
    }

    mbaff_frame_flag = (pic_param->seq_fields.bits.mb_adaptive_frame_field_flag &&
                        !pic_param->pic_fields.bits.field_pic_flag);

    /* NOTE(review): masking with 0xff caps the dimensions at 255 MBs —
     * presumably a hardware field-width limit; confirm against the PRM. */
    width_in_mbs = ((pic_param->picture_width_in_mbs_minus1 + 1) & 0xff);
    height_in_mbs = ((pic_param->picture_height_in_mbs_minus1 + 1) & 0xff); /* frame height */

    assert(!((width_in_mbs * height_in_mbs) & 0x8000)); /* hardware requirement */

    /* BSD unit doesn't support 4:2:2 and 4:4:4 picture */
    assert(pic_param->seq_fields.bits.chroma_format_idc == 0 || /* monochrome picture */
           pic_param->seq_fields.bits.chroma_format_idc == 1);  /* 4:2:0 */
    assert(pic_param->seq_fields.bits.residual_colour_transform_flag == 0); /* only available for 4:4:4 */

    /* Header the BSD unit prepends to each AVC IT command it generates. */
    avc_it_command_header = (CMD_MEDIA_OBJECT_EX | (12 - 2));

    BEGIN_BCS_BATCH(batch, 6);
    OUT_BCS_BATCH(batch, CMD_AVC_BSD_IMG_STATE | (6 - 2));
    OUT_BCS_BATCH(batch,
                  ((width_in_mbs * height_in_mbs) & 0x7fff));
    OUT_BCS_BATCH(batch,
                  (height_in_mbs << 16) |
                  (width_in_mbs << 0));
    OUT_BCS_BATCH(batch,
                  ((pic_param->second_chroma_qp_index_offset & 0x1f) << 24) |
                  ((pic_param->chroma_qp_index_offset & 0x1f) << 16) |
                  (SCAN_RASTER_ORDER << 15) | /* AVC ILDB Data */
                  (SCAN_SPECIAL_ORDER << 14) | /* AVC IT Command */
                  (SCAN_RASTER_ORDER << 13) | /* AVC IT Data */
                  (1 << 12) | /* always 1, hardware requirement */
                  (qm_present_flag << 10) |
                  (img_struct << 8) |
                  (16 << 0)); /* FIXME: always support 16 reference frames ??? */
    OUT_BCS_BATCH(batch,
                  (RESIDUAL_DATA_OFFSET << 24) | /* residual data offset */
                  (0 << 17) | /* don't overwrite SRT */
                  (0 << 16) | /* Un-SRT (Unsynchronized Root Thread) */
                  (0 << 12) | /* FIXME: no 16MV ??? */
                  (pic_param->seq_fields.bits.chroma_format_idc << 10) |
                  (i965_h264_context->enable_avc_ildb << 8)  | /* Enable ILDB writing output */
                  (pic_param->pic_fields.bits.entropy_coding_mode_flag << 7) |
                  ((!pic_param->pic_fields.bits.reference_pic_flag) << 6) |
                  (pic_param->pic_fields.bits.constrained_intra_pred_flag << 5) |
                  (pic_param->seq_fields.bits.direct_8x8_inference_flag << 4) |
                  (pic_param->pic_fields.bits.transform_8x8_mode_flag << 3) |
                  (pic_param->seq_fields.bits.frame_mbs_only_flag << 2) |
                  (mbaff_frame_flag << 1) |
                  (pic_param->pic_fields.bits.field_pic_flag << 0));
    OUT_BCS_BATCH(batch, avc_it_command_header);
    ADVANCE_BCS_BATCH(batch);
}
196
 
197
/*
 * Emit CMD_AVC_BSD_QM_STATE: upload the application-supplied inverse
 * quantisation scaling matrices. Always loads the six 4x4 lists; the two
 * 8x8 lists are appended only when transform_8x8_mode_flag is set (the
 * command length and the enable mask dword change accordingly).
 * No-op when the app supplied no IQ matrix (hardware built-ins are used,
 * as selected in i965_avc_bsd_img_state).
 */
static void
i965_avc_bsd_qm_state(VADriverContextP ctx,
                      struct decode_state *decode_state,
                      struct i965_h264_context *i965_h264_context)
{
    struct intel_batchbuffer *batch = i965_h264_context->batch;
    int cmd_len;
    VAIQMatrixBufferH264 *iq_matrix;
    VAPictureParameterBufferH264 *pic_param;

    if (!decode_state->iq_matrix || !decode_state->iq_matrix->buffer)
        return;

    iq_matrix = (VAIQMatrixBufferH264 *)decode_state->iq_matrix->buffer;

    assert(decode_state->pic_param && decode_state->pic_param->buffer);
    pic_param = (VAPictureParameterBufferH264 *)decode_state->pic_param->buffer;

    cmd_len = 2 + 6 * 4; /* always load six 4x4 scaling matrices */

    if (pic_param->pic_fields.bits.transform_8x8_mode_flag)
        cmd_len += 2 * 16; /* load two 8x8 scaling matrices */

    BEGIN_BCS_BATCH(batch, cmd_len);
    OUT_BCS_BATCH(batch, CMD_AVC_BSD_QM_STATE | (cmd_len - 2));

    if (pic_param->pic_fields.bits.transform_8x8_mode_flag)
        OUT_BCS_BATCH(batch,
                      (0x0  << 8) | /* don't use default built-in matrices */
                      (0xff << 0)); /* six 4x4 and two 8x8 scaling matrices */
    else
        OUT_BCS_BATCH(batch,
                      (0x0  << 8) | /* don't use default built-in matrices */
                      (0x3f << 0)); /* six 4x4 scaling matrices */

    /* Raw matrix payload follows the header dwords directly. */
    intel_batchbuffer_data(batch, &iq_matrix->ScalingList4x4[0][0], 6 * 4 * 4);

    if (pic_param->pic_fields.bits.transform_8x8_mode_flag)
        intel_batchbuffer_data(batch, &iq_matrix->ScalingList8x8[0][0], 2 * 16 * 4);

    ADVANCE_BCS_BATCH(batch);
}
239
 
240
static void
241
i965_avc_bsd_slice_state(VADriverContextP ctx,
242
                         VAPictureParameterBufferH264 *pic_param,
243
                         VASliceParameterBufferH264 *slice_param,
244
                         struct i965_h264_context *i965_h264_context)
245
{
246
    struct intel_batchbuffer *batch = i965_h264_context->batch;
247
    int present_flag, cmd_len, list, j;
248
    uint8_t ref_idx_state[32];
249
    char weightoffsets[32 * 6];
250
 
251
    /* don't issue SLICE_STATE for intra-prediction decoding */
252
    if (slice_param->slice_type == SLICE_TYPE_I ||
253
        slice_param->slice_type == SLICE_TYPE_SI)
254
        return;
255
 
256
    cmd_len = 2;
257
 
258
    if (slice_param->slice_type == SLICE_TYPE_P ||
259
        slice_param->slice_type == SLICE_TYPE_SP) {
260
        present_flag = PRESENT_REF_LIST0;
261
        cmd_len += 8;
262
    } else {
263
        present_flag = PRESENT_REF_LIST0 | PRESENT_REF_LIST1;
264
        cmd_len += 16;
265
    }
266
 
267
    if ((slice_param->slice_type == SLICE_TYPE_P ||
268
         slice_param->slice_type == SLICE_TYPE_SP) &&
269
        (pic_param->pic_fields.bits.weighted_pred_flag == 1)) {
270
        present_flag |= PRESENT_WEIGHT_OFFSET_L0;
271
        cmd_len += 48;
272
    }
273
 
274
    if ((slice_param->slice_type == SLICE_TYPE_B) &&
275
        (pic_param->pic_fields.bits.weighted_bipred_idc == 1)) {
276
        present_flag |= PRESENT_WEIGHT_OFFSET_L0 | PRESENT_WEIGHT_OFFSET_L1;
277
        cmd_len += 96;
278
    }
279
 
280
    BEGIN_BCS_BATCH(batch, cmd_len);
281
    OUT_BCS_BATCH(batch, CMD_AVC_BSD_SLICE_STATE | (cmd_len - 2));
282
    OUT_BCS_BATCH(batch, present_flag);
283
 
284
    for (list = 0; list < 2; list++) {
285
        int flag, num_va_pics;
286
        VAPictureH264 *va_pic;
287
 
288
        if (list == 0) {
289
            flag        = PRESENT_REF_LIST0;
290
            va_pic      = slice_param->RefPicList0;
291
            num_va_pics = slice_param->num_ref_idx_l0_active_minus1 + 1;
292
        } else {
293
            flag        = PRESENT_REF_LIST1;
294
            va_pic      = slice_param->RefPicList1;
295
            num_va_pics = slice_param->num_ref_idx_l1_active_minus1 + 1;
296
        }
297
 
298
        if (!(present_flag & flag))
299
            continue;
300
 
301
        gen5_fill_avc_ref_idx_state(
302
            ref_idx_state,
303
            va_pic, num_va_pics,
304
            i965_h264_context->fsid_list
305
        );
306
        intel_batchbuffer_data(batch, ref_idx_state, sizeof(ref_idx_state));
307
    }
308
 
309
    i965_h264_context->weight128_luma_l0 = 0;
310
    i965_h264_context->weight128_luma_l1 = 0;
311
    i965_h264_context->weight128_chroma_l0 = 0;
312
    i965_h264_context->weight128_chroma_l1 = 0;
313
 
314
    i965_h264_context->weight128_offset0_flag = 0;
315
    i965_h264_context->weight128_offset0 = 0;
316
 
317
    if (present_flag & PRESENT_WEIGHT_OFFSET_L0) {
318
        for (j = 0; j < 32; j++) {
319
            weightoffsets[j * 6 + 0] = slice_param->luma_offset_l0[j];
320
            weightoffsets[j * 6 + 1] = slice_param->luma_weight_l0[j];
321
            weightoffsets[j * 6 + 2] = slice_param->chroma_offset_l0[j][0];
322
            weightoffsets[j * 6 + 3] = slice_param->chroma_weight_l0[j][0];
323
            weightoffsets[j * 6 + 4] = slice_param->chroma_offset_l0[j][1];
324
            weightoffsets[j * 6 + 5] = slice_param->chroma_weight_l0[j][1];
325
 
326
            if (pic_param->pic_fields.bits.weighted_pred_flag == 1 ||
327
                pic_param->pic_fields.bits.weighted_bipred_idc == 1) {
328
                if (i965_h264_context->use_hw_w128) {
329
                    if (slice_param->luma_weight_l0[j] == 128)
330
                        i965_h264_context->weight128_luma_l0 |= (1 << j);
331
 
332
                    if (slice_param->chroma_weight_l0[j][0] == 128 ||
333
                        slice_param->chroma_weight_l0[j][1] == 128)
334
                        i965_h264_context->weight128_chroma_l0 |= (1 << j);
335
                } else {
336
                    /* FIXME: workaround for weight 128 */
337
                    if (slice_param->luma_weight_l0[j] == 128 ||
338
                        slice_param->chroma_weight_l0[j][0] == 128 ||
339
                        slice_param->chroma_weight_l0[j][1] == 128)
340
                        i965_h264_context->weight128_offset0_flag = 1;
341
                }
342
            }
343
        }
344
 
345
        intel_batchbuffer_data(batch, weightoffsets, sizeof(weightoffsets));
346
    }
347
 
348
    if (present_flag & PRESENT_WEIGHT_OFFSET_L1) {
349
        for (j = 0; j < 32; j++) {
350
            weightoffsets[j * 6 + 0] = slice_param->luma_offset_l1[j];
351
            weightoffsets[j * 6 + 1] = slice_param->luma_weight_l1[j];
352
            weightoffsets[j * 6 + 2] = slice_param->chroma_offset_l1[j][0];
353
            weightoffsets[j * 6 + 3] = slice_param->chroma_weight_l1[j][0];
354
            weightoffsets[j * 6 + 4] = slice_param->chroma_offset_l1[j][1];
355
            weightoffsets[j * 6 + 5] = slice_param->chroma_weight_l1[j][1];
356
 
357
            if (pic_param->pic_fields.bits.weighted_bipred_idc == 1) {
358
                if (i965_h264_context->use_hw_w128) {
359
                    if (slice_param->luma_weight_l1[j] == 128)
360
                        i965_h264_context->weight128_luma_l1 |= (1 << j);
361
 
362
                    if (slice_param->chroma_weight_l1[j][0] == 128 ||
363
                        slice_param->chroma_weight_l1[j][1] == 128)
364
                        i965_h264_context->weight128_chroma_l1 |= (1 << j);
365
                } else {
366
                    if (slice_param->luma_weight_l0[j] == 128 ||
367
                        slice_param->chroma_weight_l0[j][0] == 128 ||
368
                        slice_param->chroma_weight_l0[j][1] == 128)
369
                        i965_h264_context->weight128_offset0_flag = 1;
370
                }
371
            }
372
        }
373
 
374
        intel_batchbuffer_data(batch, weightoffsets, sizeof(weightoffsets));
375
    }
376
 
377
    ADVANCE_BCS_BATCH(batch);
378
}
379
 
380
/*
 * Emit CMD_AVC_BSD_BUF_BASE_STATE (74 dwords): internal scratch buffers,
 * IT command/data output buffers, optional ILDB output, then per-reference
 * direct-MV buffer pairs, the current picture's DMV pair, and the POC list.
 * The dword layout is fixed; each fsid_list slot always contributes exactly
 * two dwords (top/bottom), zeroed when the slot is unused.
 *
 * Also prepares the current surface: marks it referenced when needed,
 * allocates its NV12 bo, clears chroma to neutral grey for monochrome
 * streams, and (re)creates its DMV buffers.
 */
static void
i965_avc_bsd_buf_base_state(VADriverContextP ctx,
                            VAPictureParameterBufferH264 *pic_param,
                            VASliceParameterBufferH264 *slice_param,
                            struct i965_h264_context *i965_h264_context)
{
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct intel_batchbuffer *batch = i965_h264_context->batch;
    struct i965_avc_bsd_context *i965_avc_bsd_context;
    int i, j;
    VAPictureH264 *va_pic;
    struct object_surface *obj_surface;
    GenAvcSurface *avc_bsd_surface;

    i965_avc_bsd_context = &i965_h264_context->i965_avc_bsd_context;

    BEGIN_BCS_BATCH(batch, 74);
    OUT_BCS_BATCH(batch, CMD_AVC_BSD_BUF_BASE_STATE | (74 - 2));
    OUT_BCS_RELOC(batch, i965_avc_bsd_context->bsd_raw_store.bo,
                  I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
                  0);
    OUT_BCS_RELOC(batch, i965_avc_bsd_context->mpr_row_store.bo,
                  I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
                  0);
    /* IT command buffer: offset past commands already emitted for scoreboarding. */
    OUT_BCS_RELOC(batch, i965_h264_context->avc_it_command_mb_info.bo,
                  I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
                  i965_h264_context->avc_it_command_mb_info.mbs * i965_h264_context->use_avc_hw_scoreboard * MB_CMD_IN_BYTES);
    OUT_BCS_RELOC(batch, i965_h264_context->avc_it_data.bo,
                  I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
                  (i965_h264_context->avc_it_data.write_offset << 6));

    if (i965_h264_context->enable_avc_ildb)
        OUT_BCS_RELOC(batch, i965_h264_context->avc_ildb_data.bo,
                      I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
                      0);
    else
        OUT_BCS_BATCH(batch, 0);

    /* Direct-MV buffers for each tracked reference surface. */
    for (i = 0; i < ARRAY_ELEMS(i965_h264_context->fsid_list); i++) {
        if (i965_h264_context->fsid_list[i].surface_id != VA_INVALID_ID) {
            int found = 0;
            for (j = 0; j < ARRAY_ELEMS(pic_param->ReferenceFrames); j++) {
                va_pic = &pic_param->ReferenceFrames[j];

                if (va_pic->flags & VA_PICTURE_H264_INVALID)
                    continue;

                if (va_pic->picture_id == i965_h264_context->fsid_list[i].surface_id) {
                    found = 1;
                    break;
                }
            }

            /* NOTE(review): only an assert guards the lookup; with NDEBUG a
             * missing reference would leave va_pic at the last scanned entry. */
            assert(found == 1);

            if (!(va_pic->flags & VA_PICTURE_H264_INVALID)) {
                obj_surface = SURFACE(va_pic->picture_id);
                assert(obj_surface);
                avc_bsd_surface = obj_surface->private_data;

                if (avc_bsd_surface == NULL) {
                    OUT_BCS_BATCH(batch, 0);
                    OUT_BCS_BATCH(batch, 0);
                } else {
                    OUT_BCS_RELOC(batch, avc_bsd_surface->dmv_top,
                                  I915_GEM_DOMAIN_INSTRUCTION, 0,
                                  0);

                    /* Without a separate bottom-field buffer, dmv_top serves
                     * for both dwords. */
                    if (avc_bsd_surface->dmv_bottom_flag == 1)
                        OUT_BCS_RELOC(batch, avc_bsd_surface->dmv_bottom,
                                      I915_GEM_DOMAIN_INSTRUCTION, 0,
                                      0);
                    else
                        OUT_BCS_RELOC(batch, avc_bsd_surface->dmv_top,
                                      I915_GEM_DOMAIN_INSTRUCTION, 0,
                                      0);
                }
            }
        } else {
            OUT_BCS_BATCH(batch, 0);
            OUT_BCS_BATCH(batch, 0);
        }
    }

    /* Prepare the current (destination) surface. */
    va_pic = &pic_param->CurrPic;
    assert(!(va_pic->flags & VA_PICTURE_H264_INVALID));
    obj_surface = SURFACE(va_pic->picture_id);
    assert(obj_surface);
    obj_surface->flags &= ~SURFACE_REF_DIS_MASK;
    obj_surface->flags |= (pic_param->pic_fields.bits.reference_pic_flag ? SURFACE_REFERENCED : 0);
    i965_check_alloc_surface_bo(ctx, obj_surface, 0, VA_FOURCC('N','V','1','2'), SUBSAMPLE_YUV420);

    /* initial uv component for YUV400 case */
    if (pic_param->seq_fields.bits.chroma_format_idc == 0) {
         unsigned int uv_offset = obj_surface->width * obj_surface->height;
         unsigned int uv_size   = obj_surface->width * obj_surface->height / 2;

         /* 0x80 = neutral chroma in NV12 */
         dri_bo_map(obj_surface->bo, 1);
         memset(obj_surface->bo->virtual + uv_offset, 0x80, uv_size);
         dri_bo_unmap(obj_surface->bo);
    }

    i965_avc_bsd_init_avc_bsd_surface(ctx, obj_surface, pic_param, i965_h264_context);
    avc_bsd_surface = obj_surface->private_data;

    OUT_BCS_RELOC(batch, avc_bsd_surface->dmv_top,
                  I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
                  0);

    if (avc_bsd_surface->dmv_bottom_flag == 1)
        OUT_BCS_RELOC(batch, avc_bsd_surface->dmv_bottom,
                      I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
                      0);
    else
        OUT_BCS_RELOC(batch, avc_bsd_surface->dmv_top,
                      I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
                      0);

    /* POC List */
    for (i = 0; i < ARRAY_ELEMS(i965_h264_context->fsid_list); i++) {
        if (i965_h264_context->fsid_list[i].surface_id != VA_INVALID_ID) {
            int found = 0;
            for (j = 0; j < ARRAY_ELEMS(pic_param->ReferenceFrames); j++) {
                va_pic = &pic_param->ReferenceFrames[j];

                if (va_pic->flags & VA_PICTURE_H264_INVALID)
                    continue;

                if (va_pic->picture_id == i965_h264_context->fsid_list[i].surface_id) {
                    found = 1;
                    break;
                }
            }

            assert(found == 1);

            if (!(va_pic->flags & VA_PICTURE_H264_INVALID)) {
                OUT_BCS_BATCH(batch, va_pic->TopFieldOrderCnt);
                OUT_BCS_BATCH(batch, va_pic->BottomFieldOrderCnt);
            }
        } else {
            OUT_BCS_BATCH(batch, 0);
            OUT_BCS_BATCH(batch, 0);
        }
    }

    va_pic = &pic_param->CurrPic;
    OUT_BCS_BATCH(batch, va_pic->TopFieldOrderCnt);
    OUT_BCS_BATCH(batch, va_pic->BottomFieldOrderCnt);

    ADVANCE_BCS_BATCH(batch);
}
532
 
533
/*
 * Emit CMD_AVC_BSD_OBJECT (G4x variant) for one slice: points the BSD unit
 * at the slice bitstream (past the already-parsed header, at the first
 * macroblock bit) and packs slice-level decode parameters.
 * When slice_param is NULL a "phantom slice" is emitted to flush/terminate
 * decoding of the picture.
 */
static void
g4x_avc_bsd_object(VADriverContextP ctx,
                   struct decode_state *decode_state,
                   VAPictureParameterBufferH264 *pic_param,
                   VASliceParameterBufferH264 *slice_param,
                   int slice_index,
                   struct i965_h264_context *i965_h264_context)
{
    struct intel_batchbuffer *batch = i965_h264_context->batch;
    int width_in_mbs = pic_param->picture_width_in_mbs_minus1 + 1;
    int height_in_mbs = pic_param->picture_height_in_mbs_minus1 + 1; /* frame height */

    if (slice_param) {
        int encrypted, counter_value, cmd_len;
        int slice_hor_pos, slice_ver_pos;
        int num_ref_idx_l0, num_ref_idx_l1;
        int mbaff_picture = (!pic_param->pic_fields.bits.field_pic_flag &&
                             pic_param->seq_fields.bits.mb_adaptive_frame_field_flag);
        unsigned int slice_data_bit_offset;
        int weighted_pred_idc = 0;
        int first_mb_in_slice = 0;
        int slice_type;

        encrypted = 0; /* FIXME: which flag in VAAPI is used for encryption? */

        /* Encrypted slices carry one extra dword (the counter value). */
        if (encrypted) {
            cmd_len = 9;
            counter_value = 0; /* FIXME: ??? */
        } else
            cmd_len = 8;


        /* Bit offset of the first MB, accounting for emulation-prevention bytes. */
        slice_data_bit_offset = avc_get_first_mb_bit_offset_with_epb(
            decode_state->slice_datas[slice_index]->bo,
            slice_param,
            pic_param->pic_fields.bits.entropy_coding_mode_flag
        );

        /* Collapse SI->I and SP->P: hardware only knows I/P/B. */
        if (slice_param->slice_type == SLICE_TYPE_I ||
            slice_param->slice_type == SLICE_TYPE_SI)
            slice_type = SLICE_TYPE_I;
        else if (slice_param->slice_type == SLICE_TYPE_P ||
                 slice_param->slice_type == SLICE_TYPE_SP)
            slice_type = SLICE_TYPE_P;
        else {
            assert(slice_param->slice_type == SLICE_TYPE_B);
            slice_type = SLICE_TYPE_B;
        }

        if (slice_type == SLICE_TYPE_I) {
            assert(slice_param->num_ref_idx_l0_active_minus1 == 0);
            assert(slice_param->num_ref_idx_l1_active_minus1 == 0);
            num_ref_idx_l0 = 0;
            num_ref_idx_l1 = 0;
        } else if (slice_type == SLICE_TYPE_P) {
            assert(slice_param->num_ref_idx_l1_active_minus1 == 0);
            num_ref_idx_l0 = slice_param->num_ref_idx_l0_active_minus1 + 1;
            num_ref_idx_l1 = 0;
        } else {
            num_ref_idx_l0 = slice_param->num_ref_idx_l0_active_minus1 + 1;
            num_ref_idx_l1 = slice_param->num_ref_idx_l1_active_minus1 + 1;
        }

        if (slice_type == SLICE_TYPE_P)
            weighted_pred_idc = pic_param->pic_fields.bits.weighted_pred_flag;
        else if (slice_type == SLICE_TYPE_B)
            weighted_pred_idc = pic_param->pic_fields.bits.weighted_bipred_idc;

        /* In MBAFF each slice address counts MB pairs, hence the shift. */
        first_mb_in_slice = slice_param->first_mb_in_slice << mbaff_picture;
        slice_hor_pos = first_mb_in_slice % width_in_mbs;
        slice_ver_pos = first_mb_in_slice / width_in_mbs;

        BEGIN_BCS_BATCH(batch, cmd_len);
        OUT_BCS_BATCH(batch, CMD_AVC_BSD_OBJECT | (cmd_len - 2));
        OUT_BCS_BATCH(batch,
                      (encrypted << 31) |
                      ((slice_param->slice_data_size - (slice_data_bit_offset >> 3)) << 0));
        OUT_BCS_BATCH(batch,
                      (slice_param->slice_data_offset +
                       (slice_data_bit_offset >> 3)));
        OUT_BCS_BATCH(batch,
                      (0 << 31) | /* concealment mode: 0->intra 16x16 prediction, 1->inter P Copy */
                      (0 << 14) | /* ignore BSDPrematureComplete Error handling */
                      (0 << 13) | /* FIXME: ??? */
                      (0 << 12) | /* ignore MPR Error handling */
                      (0 << 10) | /* ignore Entropy Error handling */
                      (0 << 8)  | /* ignore MB Header Error handling */
                      (slice_type << 0));
        OUT_BCS_BATCH(batch,
                      (num_ref_idx_l1 << 24) |
                      (num_ref_idx_l0 << 16) |
                      (slice_param->chroma_log2_weight_denom << 8) |
                      (slice_param->luma_log2_weight_denom << 0));
        OUT_BCS_BATCH(batch,
                      (weighted_pred_idc << 30) |
                      (slice_param->direct_spatial_mv_pred_flag << 29) |
                      (slice_param->disable_deblocking_filter_idc << 27) |
                      (slice_param->cabac_init_idc << 24) |
                      ((pic_param->pic_init_qp_minus26 + 26 + slice_param->slice_qp_delta) << 16) |
                      ((slice_param->slice_beta_offset_div2 & 0xf) << 8) |
                      ((slice_param->slice_alpha_c0_offset_div2 & 0xf) << 0));
        OUT_BCS_BATCH(batch,
                      (slice_ver_pos << 24) |
                      (slice_hor_pos << 16) |
                      (first_mb_in_slice << 0));
        OUT_BCS_BATCH(batch,
                      (1 << 7) |
                      ((0x7 - (slice_data_bit_offset & 0x7)) << 0)); /* bit offset within the first byte */

        if (encrypted) {
            OUT_BCS_BATCH(batch, counter_value);
        }

        ADVANCE_BCS_BATCH(batch);
    } else {
        /* Phantom slice: zero-length command that covers the whole picture
         * (MB count halved for field pictures) to terminate decoding. */
        BEGIN_BCS_BATCH(batch, 8);
        OUT_BCS_BATCH(batch, CMD_AVC_BSD_OBJECT | (8 - 2));
        OUT_BCS_BATCH(batch, 0); /* indirect data length for phantom slice is 0 */
        OUT_BCS_BATCH(batch, 0); /* indirect data start address for phantom slice is 0 */
        OUT_BCS_BATCH(batch, 0);
        OUT_BCS_BATCH(batch, 0);
        OUT_BCS_BATCH(batch, 0);
        OUT_BCS_BATCH(batch, width_in_mbs * height_in_mbs / (1 + !!pic_param->pic_fields.bits.field_pic_flag));
        OUT_BCS_BATCH(batch, 0);
        ADVANCE_BCS_BATCH(batch);
    }
}
660
 
661
static void
662
ironlake_avc_bsd_object(VADriverContextP ctx,
663
                        struct decode_state *decode_state,
664
                        VAPictureParameterBufferH264 *pic_param,
665
                        VASliceParameterBufferH264 *slice_param,
666
                        int slice_index,
667
                        struct i965_h264_context *i965_h264_context)
668
{
669
    struct intel_batchbuffer *batch = i965_h264_context->batch;
670
    int width_in_mbs = pic_param->picture_width_in_mbs_minus1 + 1;
671
    int height_in_mbs = pic_param->picture_height_in_mbs_minus1 + 1; /* frame height */
672
 
673
    if (slice_param) {
674
        int encrypted, counter_value;
675
        int slice_hor_pos, slice_ver_pos;
676
        int num_ref_idx_l0, num_ref_idx_l1;
677
        int mbaff_picture = (!pic_param->pic_fields.bits.field_pic_flag &&
678
                             pic_param->seq_fields.bits.mb_adaptive_frame_field_flag);
679
        unsigned int slice_data_bit_offset;
680
        int weighted_pred_idc = 0;
681
        int first_mb_in_slice;
682
        int slice_type;
683
 
684
        encrypted = 0; /* FIXME: which flag in VAAPI is used for encryption? */
685
 
686
        if (encrypted) {
687
            counter_value = 0; /* FIXME: ??? */
688
        } else
689
            counter_value = 0;
690
 
691
        slice_data_bit_offset = avc_get_first_mb_bit_offset_with_epb(
692
            decode_state->slice_datas[slice_index]->bo,
693
            slice_param,
694
            pic_param->pic_fields.bits.entropy_coding_mode_flag
695
        );
696
 
697
        if (slice_param->slice_type == SLICE_TYPE_I ||
698
            slice_param->slice_type == SLICE_TYPE_SI)
699
            slice_type = SLICE_TYPE_I;
700
        else if (slice_param->slice_type == SLICE_TYPE_P ||
701
                 slice_param->slice_type == SLICE_TYPE_SP)
702
            slice_type = SLICE_TYPE_P;
703
        else {
704
            assert(slice_param->slice_type == SLICE_TYPE_B);
705
            slice_type = SLICE_TYPE_B;
706
        }
707
 
708
        if (slice_type == SLICE_TYPE_I) {
709
            assert(slice_param->num_ref_idx_l0_active_minus1 == 0);
710
            assert(slice_param->num_ref_idx_l1_active_minus1 == 0);
711
            num_ref_idx_l0 = 0;
712
            num_ref_idx_l1 = 0;
713
        } else if (slice_type == SLICE_TYPE_P) {
714
            assert(slice_param->num_ref_idx_l1_active_minus1 == 0);
715
            num_ref_idx_l0 = slice_param->num_ref_idx_l0_active_minus1 + 1;
716
            num_ref_idx_l1 = 0;
717
        } else {
718
            num_ref_idx_l0 = slice_param->num_ref_idx_l0_active_minus1 + 1;
719
            num_ref_idx_l1 = slice_param->num_ref_idx_l1_active_minus1 + 1;
720
        }
721
 
722
        if (slice_type == SLICE_TYPE_P)
723
            weighted_pred_idc = pic_param->pic_fields.bits.weighted_pred_flag;
724
        else if (slice_type == SLICE_TYPE_B)
725
            weighted_pred_idc = pic_param->pic_fields.bits.weighted_bipred_idc;
726
 
727
        first_mb_in_slice = slice_param->first_mb_in_slice << mbaff_picture;
728
        slice_hor_pos = first_mb_in_slice % width_in_mbs;
729
        slice_ver_pos = first_mb_in_slice / width_in_mbs;
730
 
731
        BEGIN_BCS_BATCH(batch, 16);
732
        OUT_BCS_BATCH(batch, CMD_AVC_BSD_OBJECT | (16 - 2));
733
        OUT_BCS_BATCH(batch,
734
                      (encrypted << 31) |
735
                      (0 << 30) | /* FIXME: packet based bit stream */
736
                      (0 << 29) | /* FIXME: packet format */
737
                      ((slice_param->slice_data_size - (slice_data_bit_offset >> 3)) << 0));
738
        OUT_BCS_BATCH(batch,
739
                      (slice_param->slice_data_offset +
740
                       (slice_data_bit_offset >> 3)));
741
        OUT_BCS_BATCH(batch,
742
                      (0 << 31) | /* concealment mode: 0->intra 16x16 prediction, 1->inter P Copy */
743
                      (0 << 14) | /* ignore BSDPrematureComplete Error handling */
744
                      (0 << 13) | /* FIXME: ??? */
745
                      (0 << 12) | /* ignore MPR Error handling */
746
                      (0 << 10) | /* ignore Entropy Error handling */
747
                      (0 << 8)  | /* ignore MB Header Error handling */
748
                      (slice_type << 0));
749
        OUT_BCS_BATCH(batch,
750
                      (num_ref_idx_l1 << 24) |
751
                      (num_ref_idx_l0 << 16) |
752
                      (slice_param->chroma_log2_weight_denom << 8) |
753
                      (slice_param->luma_log2_weight_denom << 0));
754
        OUT_BCS_BATCH(batch,
755
                      (weighted_pred_idc << 30) |
756
                      (slice_param->direct_spatial_mv_pred_flag << 29) |
757
                      (slice_param->disable_deblocking_filter_idc << 27) |
758
                      (slice_param->cabac_init_idc << 24) |
759
                      ((pic_param->pic_init_qp_minus26 + 26 + slice_param->slice_qp_delta) << 16) |
760
                      ((slice_param->slice_beta_offset_div2 & 0xf) << 8) |
761
                      ((slice_param->slice_alpha_c0_offset_div2 & 0xf) << 0));
762
        OUT_BCS_BATCH(batch,
763
                      (slice_ver_pos << 24) |
764
                      (slice_hor_pos << 16) |
765
                      (first_mb_in_slice << 0));
766
        OUT_BCS_BATCH(batch,
767
                      (1 << 7) |
768
                      ((0x7 - (slice_data_bit_offset & 0x7)) << 0));
769
        OUT_BCS_BATCH(batch, counter_value);
770
 
771
        /* FIXME: dw9-dw11 */
772
        OUT_BCS_BATCH(batch, 0);
773
        OUT_BCS_BATCH(batch, 0);
774
        OUT_BCS_BATCH(batch, 0);
775
        OUT_BCS_BATCH(batch, i965_h264_context->weight128_luma_l0);
776
        OUT_BCS_BATCH(batch, i965_h264_context->weight128_luma_l1);
777
        OUT_BCS_BATCH(batch, i965_h264_context->weight128_chroma_l0);
778
        OUT_BCS_BATCH(batch, i965_h264_context->weight128_chroma_l1);
779
 
780
        ADVANCE_BCS_BATCH(batch);
781
    } else {
782
        BEGIN_BCS_BATCH(batch, 16);
783
        OUT_BCS_BATCH(batch, CMD_AVC_BSD_OBJECT | (16 - 2));
784
        OUT_BCS_BATCH(batch, 0); /* indirect data length for phantom slice is 0 */
785
        OUT_BCS_BATCH(batch, 0); /* indirect data start address for phantom slice is 0 */
786
        OUT_BCS_BATCH(batch, 0);
787
        OUT_BCS_BATCH(batch, 0);
788
        OUT_BCS_BATCH(batch, 0);
789
        OUT_BCS_BATCH(batch, width_in_mbs * height_in_mbs / (1 + !!pic_param->pic_fields.bits.field_pic_flag));
790
        OUT_BCS_BATCH(batch, 0);
791
        OUT_BCS_BATCH(batch, 0);
792
        OUT_BCS_BATCH(batch, 0);
793
        OUT_BCS_BATCH(batch, 0);
794
        OUT_BCS_BATCH(batch, 0);
795
        OUT_BCS_BATCH(batch, 0);
796
        OUT_BCS_BATCH(batch, 0);
797
        OUT_BCS_BATCH(batch, 0);
798
        OUT_BCS_BATCH(batch, 0);
799
        ADVANCE_BCS_BATCH(batch);
800
    }
801
}
802
 
803
static void
804
i965_avc_bsd_object(VADriverContextP ctx,
805
                    struct decode_state *decode_state,
806
                    VAPictureParameterBufferH264 *pic_param,
807
                    VASliceParameterBufferH264 *slice_param,
808
                    int slice_index,
809
                    struct i965_h264_context *i965_h264_context)
810
{
811
    struct i965_driver_data *i965 = i965_driver_data(ctx);
812
 
813
    if (IS_IRONLAKE(i965->intel.device_id))
814
        ironlake_avc_bsd_object(ctx, decode_state, pic_param, slice_param, slice_index, i965_h264_context);
815
    else
816
        g4x_avc_bsd_object(ctx, decode_state, pic_param, slice_param, slice_index, i965_h264_context);
817
}
818
 
819
static void
820
i965_avc_bsd_phantom_slice(VADriverContextP ctx,
821
                           struct decode_state *decode_state,
822
                           VAPictureParameterBufferH264 *pic_param,
823
                           struct i965_h264_context *i965_h264_context)
824
{
825
    i965_avc_bsd_object(ctx, decode_state, pic_param, NULL, 0, i965_h264_context);
826
}
827
 
828
static void
829
i965_avc_bsd_frame_store_index(VADriverContextP ctx,
830
                               VAPictureParameterBufferH264 *pic_param,
831
                               struct i965_h264_context *i965_h264_context)
832
{
833
    struct i965_driver_data *i965 = i965_driver_data(ctx);
834
    int i, j;
835
 
836
    assert(ARRAY_ELEMS(i965_h264_context->fsid_list) == ARRAY_ELEMS(pic_param->ReferenceFrames));
837
 
838
    for (i = 0; i < ARRAY_ELEMS(i965_h264_context->fsid_list); i++) {
839
        int found = 0;
840
 
841
        if (i965_h264_context->fsid_list[i].surface_id == VA_INVALID_ID)
842
            continue;
843
 
844
        for (j = 0; j < ARRAY_ELEMS(pic_param->ReferenceFrames); j++) {
845
            VAPictureH264 *ref_pic = &pic_param->ReferenceFrames[j];
846
            if (ref_pic->flags & VA_PICTURE_H264_INVALID)
847
                continue;
848
 
849
            if (i965_h264_context->fsid_list[i].surface_id == ref_pic->picture_id) {
850
                found = 1;
851
                break;
852
            }
853
        }
854
 
855
        if (!found) {
856
            struct object_surface *obj_surface = SURFACE(i965_h264_context->fsid_list[i].surface_id);
857
            obj_surface->flags &= ~SURFACE_REFERENCED;
858
 
859
            if ((obj_surface->flags & SURFACE_ALL_MASK) == SURFACE_DISPLAYED) {
860
                dri_bo_unreference(obj_surface->bo);
861
                obj_surface->bo = NULL;
862
                obj_surface->flags &= ~SURFACE_REF_DIS_MASK;
863
            }
864
 
865
            if (obj_surface->free_private_data)
866
                obj_surface->free_private_data(&obj_surface->private_data);
867
 
868
            i965_h264_context->fsid_list[i].surface_id = VA_INVALID_ID;
869
            i965_h264_context->fsid_list[i].frame_store_id = -1;
870
        }
871
    }
872
 
873
    for (i = 0; i < ARRAY_ELEMS(pic_param->ReferenceFrames); i++) {
874
        VAPictureH264 *ref_pic = &pic_param->ReferenceFrames[i];
875
        int found = 0;
876
 
877
        if (ref_pic->flags & VA_PICTURE_H264_INVALID)
878
            continue;
879
 
880
        for (j = 0; j < ARRAY_ELEMS(i965_h264_context->fsid_list); j++) {
881
            if (i965_h264_context->fsid_list[j].surface_id == VA_INVALID_ID)
882
                continue;
883
 
884
            if (i965_h264_context->fsid_list[j].surface_id == ref_pic->picture_id) {
885
                found = 1;
886
                break;
887
            }
888
        }
889
 
890
        if (!found) {
891
            int frame_idx;
892
            struct object_surface *obj_surface = SURFACE(ref_pic->picture_id);
893
            assert(obj_surface);
894
            i965_check_alloc_surface_bo(ctx, obj_surface, 0, VA_FOURCC('N','V','1','2'), SUBSAMPLE_YUV420);
895
 
896
            for (frame_idx = 0; frame_idx < ARRAY_ELEMS(i965_h264_context->fsid_list); frame_idx++) {
897
                for (j = 0; j < ARRAY_ELEMS(i965_h264_context->fsid_list); j++) {
898
                    if (i965_h264_context->fsid_list[j].surface_id == VA_INVALID_ID)
899
                        continue;
900
 
901
                    if (i965_h264_context->fsid_list[j].frame_store_id == frame_idx)
902
                        break;
903
                }
904
 
905
                if (j == ARRAY_ELEMS(i965_h264_context->fsid_list))
906
                    break;
907
            }
908
 
909
            assert(frame_idx < ARRAY_ELEMS(i965_h264_context->fsid_list));
910
 
911
            for (j = 0; j < ARRAY_ELEMS(i965_h264_context->fsid_list); j++) {
912
                if (i965_h264_context->fsid_list[j].surface_id == VA_INVALID_ID) {
913
                    i965_h264_context->fsid_list[j].surface_id = ref_pic->picture_id;
914
                    i965_h264_context->fsid_list[j].frame_store_id = frame_idx;
915
                    break;
916
                }
917
            }
918
        }
919
    }
920
 
921
    for (i = 0; i < ARRAY_ELEMS(i965_h264_context->fsid_list) - 1; i++) {
922
        if (i965_h264_context->fsid_list[i].surface_id != VA_INVALID_ID &&
923
            i965_h264_context->fsid_list[i].frame_store_id == i)
924
            continue;
925
 
926
        for (j = i + 1; j < ARRAY_ELEMS(i965_h264_context->fsid_list); j++) {
927
            if (i965_h264_context->fsid_list[j].surface_id != VA_INVALID_ID &&
928
                i965_h264_context->fsid_list[j].frame_store_id == i) {
929
                VASurfaceID id = i965_h264_context->fsid_list[i].surface_id;
930
                int frame_idx = i965_h264_context->fsid_list[i].frame_store_id;
931
 
932
                i965_h264_context->fsid_list[i].surface_id = i965_h264_context->fsid_list[j].surface_id;
933
                i965_h264_context->fsid_list[i].frame_store_id = i965_h264_context->fsid_list[j].frame_store_id;
934
                i965_h264_context->fsid_list[j].surface_id = id;
935
                i965_h264_context->fsid_list[j].frame_store_id = frame_idx;
936
                break;
937
            }
938
        }
939
    }
940
}
941
 
942
void
943
i965_avc_bsd_pipeline(VADriverContextP ctx, struct decode_state *decode_state, void *h264_context)
944
{
945
    struct i965_h264_context *i965_h264_context = (struct i965_h264_context *)h264_context;
946
    struct intel_batchbuffer *batch = i965_h264_context->batch;
947
    VAPictureParameterBufferH264 *pic_param;
948
    VASliceParameterBufferH264 *slice_param;
949
    int i, j;
950
 
951
    assert(decode_state->pic_param && decode_state->pic_param->buffer);
952
    pic_param = (VAPictureParameterBufferH264 *)decode_state->pic_param->buffer;
953
    i965_avc_bsd_frame_store_index(ctx, pic_param, i965_h264_context);
954
 
955
    i965_h264_context->enable_avc_ildb = 0;
956
    i965_h264_context->picture.i_flag = 1;
957
 
958
    for (j = 0; j < decode_state->num_slice_params && i965_h264_context->enable_avc_ildb == 0; j++) {
959
        assert(decode_state->slice_params && decode_state->slice_params[j]->buffer);
960
        slice_param = (VASliceParameterBufferH264 *)decode_state->slice_params[j]->buffer;
961
 
962
        for (i = 0; i < decode_state->slice_params[j]->num_elements; i++) {
963
            assert(slice_param->slice_data_flag == VA_SLICE_DATA_FLAG_ALL);
964
            assert((slice_param->slice_type == SLICE_TYPE_I) ||
965
                   (slice_param->slice_type == SLICE_TYPE_SI) ||
966
                   (slice_param->slice_type == SLICE_TYPE_P) ||
967
                   (slice_param->slice_type == SLICE_TYPE_SP) ||
968
                   (slice_param->slice_type == SLICE_TYPE_B));
969
 
970
            if (slice_param->disable_deblocking_filter_idc != 1) {
971
                i965_h264_context->enable_avc_ildb = 1;
972
                break;
973
            }
974
 
975
            slice_param++;
976
        }
977
    }
978
 
979
    intel_batchbuffer_start_atomic_bcs(batch, 0x1000);
980
 
981
    i965_avc_bsd_img_state(ctx, decode_state, i965_h264_context);
982
    i965_avc_bsd_qm_state(ctx, decode_state, i965_h264_context);
983
 
984
    for (j = 0; j < decode_state->num_slice_params; j++) {
985
        assert(decode_state->slice_params && decode_state->slice_params[j]->buffer);
986
        slice_param = (VASliceParameterBufferH264 *)decode_state->slice_params[j]->buffer;
987
 
988
        i965_bsd_ind_obj_base_address(ctx, decode_state, j, i965_h264_context);
989
 
990
        for (i = 0; i < decode_state->slice_params[j]->num_elements; i++) {
991
            assert(slice_param->slice_data_flag == VA_SLICE_DATA_FLAG_ALL);
992
            assert((slice_param->slice_type == SLICE_TYPE_I) ||
993
                   (slice_param->slice_type == SLICE_TYPE_SI) ||
994
                   (slice_param->slice_type == SLICE_TYPE_P) ||
995
                   (slice_param->slice_type == SLICE_TYPE_SP) ||
996
                   (slice_param->slice_type == SLICE_TYPE_B));
997
 
998
            if (i965_h264_context->picture.i_flag &&
999
                (slice_param->slice_type != SLICE_TYPE_I ||
1000
                 slice_param->slice_type != SLICE_TYPE_SI))
1001
                i965_h264_context->picture.i_flag = 0;
1002
 
1003
            i965_avc_bsd_slice_state(ctx, pic_param, slice_param, i965_h264_context);
1004
            i965_avc_bsd_buf_base_state(ctx, pic_param, slice_param, i965_h264_context);
1005
            i965_avc_bsd_object(ctx, decode_state, pic_param, slice_param, j, i965_h264_context);
1006
            slice_param++;
1007
        }
1008
    }
1009
 
1010
    i965_avc_bsd_phantom_slice(ctx, decode_state, pic_param, i965_h264_context);
1011
    intel_batchbuffer_emit_mi_flush(batch);
1012
    intel_batchbuffer_end_atomic(batch);
1013
    intel_batchbuffer_flush(batch);
1014
}
1015
 
1016
void
1017
i965_avc_bsd_decode_init(VADriverContextP ctx, void *h264_context)
1018
{
1019
    struct i965_driver_data *i965 = i965_driver_data(ctx);
1020
    struct i965_h264_context *i965_h264_context = (struct i965_h264_context *)h264_context;
1021
    struct i965_avc_bsd_context *i965_avc_bsd_context;
1022
    dri_bo *bo;
1023
 
1024
    assert(i965_h264_context);
1025
    i965_avc_bsd_context = &i965_h264_context->i965_avc_bsd_context;
1026
 
1027
    dri_bo_unreference(i965_avc_bsd_context->bsd_raw_store.bo);
1028
    bo = dri_bo_alloc(i965->intel.bufmgr,
1029
                      "bsd raw store",
1030
                      0x3000, /* at least 11520 bytes to support 120 MBs per row */
1031
                      64);
1032
    assert(bo);
1033
    i965_avc_bsd_context->bsd_raw_store.bo = bo;
1034
 
1035
    dri_bo_unreference(i965_avc_bsd_context->mpr_row_store.bo);
1036
    bo = dri_bo_alloc(i965->intel.bufmgr,
1037
                      "mpr row store",
1038
                      0x2000, /* at least 7680 bytes to support 120 MBs per row */
1039
                      64);
1040
    assert(bo);
1041
    i965_avc_bsd_context->mpr_row_store.bo = bo;
1042
}
1043
 
1044
Bool
1045
i965_avc_bsd_ternimate(struct i965_avc_bsd_context *i965_avc_bsd_context)
1046
{
1047
    dri_bo_unreference(i965_avc_bsd_context->bsd_raw_store.bo);
1048
    dri_bo_unreference(i965_avc_bsd_context->mpr_row_store.bo);
1049
 
1050
    return True;
1051
}