  1. /*
  2.  * Copyright © 2009 Intel Corporation
  3.  *
  4.  * Permission is hereby granted, free of charge, to any person obtaining a
  5.  * copy of this software and associated documentation files (the
  6.  * "Software"), to deal in the Software without restriction, including
  7.  * without limitation the rights to use, copy, modify, merge, publish,
  8.  * distribute, sub license, and/or sell copies of the Software, and to
  9.  * permit persons to whom the Software is furnished to do so, subject to
  10.  * the following conditions:
  11.  *
  12.  * The above copyright notice and this permission notice (including the
  13.  * next paragraph) shall be included in all copies or substantial portions
  14.  * of the Software.
  15.  *
  16.  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
  17.  * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
  18.  * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
  19.  * IN NO EVENT SHALL PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR
  20.  * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
  21.  * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
  22.  * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
  23.  *
  24.  * Authors:
  25.  *    Xiang Haihao <haihao.xiang@intel.com>
  26.  *    Zou Nan hai <nanhai.zou@intel.com>
  27.  *
  28.  */
  29.  
  30. #include "sysdeps.h"
  31.  
  32. #ifdef HAVE_VA_X11
  33. # include "i965_output_dri.h"
  34. #endif
  35.  
  36. #ifdef HAVE_VA_WAYLAND
  37. # include "i965_output_wayland.h"
  38. #endif
  39.  
  40. #include "intel_driver.h"
  41. #include "intel_memman.h"
  42. #include "intel_batchbuffer.h"
  43. #include "i965_defines.h"
  44. #include "i965_drv_video.h"
  45. #include "i965_decoder.h"
  46. #include "i965_encoder.h"
  47.  
  48. #define VA_DRIVER_INIT_FUNC __vaDriverInit_0_32
  49.  
  50. #define INTEL_DRIVER_MAJOR_VERSION 1
  51. #define INTEL_DRIVER_MINOR_VERSION 0
  52. #define INTEL_DRIVER_MICRO_VERSION 20
  53. #define INTEL_DRIVER_PRE_VERSION   1
  54.  
  55. #define CONFIG_ID_OFFSET                0x01000000
  56. #define CONTEXT_ID_OFFSET               0x02000000
  57. #define SURFACE_ID_OFFSET               0x04000000
  58. #define BUFFER_ID_OFFSET                0x08000000
  59. #define IMAGE_ID_OFFSET                 0x0a000000
  60. #define SUBPIC_ID_OFFSET                0x10000000
  61.  
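/*
 * Codec capability checks.  Each HAS_* macro below keys off the PCI
 * device id (chipset generation) and, where the BSD video ring is
 * required, the intel.has_bsd flag queried from the kernel.
 */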
  62. #define HAS_MPEG2(ctx)  (IS_G4X((ctx)->intel.device_id) ||      \
  63.                          IS_IRONLAKE((ctx)->intel.device_id) || \
  64.                          ((IS_GEN6((ctx)->intel.device_id) ||   \
  65.                            IS_GEN7((ctx)->intel.device_id)) &&  \
  66.                           (ctx)->intel.has_bsd))
  67.  
  68. #define HAS_H264(ctx)   ((IS_GEN7((ctx)->intel.device_id) ||            \
  69.                           IS_GEN6((ctx)->intel.device_id) ||            \
  70.                           IS_IRONLAKE((ctx)->intel.device_id)) &&       \
  71.                          (ctx)->intel.has_bsd)
  72.  
  73. #define HAS_VC1(ctx)    ((IS_GEN7((ctx)->intel.device_id) ||    \
  74.                           IS_GEN6((ctx)->intel.device_id)) &&   \
  75.                          (ctx)->intel.has_bsd)
  76.  
  77. #define HAS_TILED_SURFACE(ctx) ((IS_GEN7((ctx)->intel.device_id) ||     \
  78.                                  IS_GEN6((ctx)->intel.device_id)) &&    \
  79.                                 (ctx)->render_state.interleaved_uv)
  80.  
  81. #define HAS_ENCODER(ctx)        ((IS_GEN7((ctx)->intel.device_id) ||    \
  82.                                   IS_GEN6((ctx)->intel.device_id)) &&   \
  83.                                  (ctx)->intel.has_bsd)
  84.  
  85. #define HAS_JPEG(ctx)   (IS_GEN7((ctx)->intel.device_id) &&     \
  86.                          (ctx)->intel.has_bsd)
  87.  
  88. #define HAS_ACCELERATED_GETIMAGE(ctx)   (IS_GEN6((ctx)->intel.device_id) ||     \
  89.                                          IS_GEN7((ctx)->intel.device_id))
  90.  
  91. #define HAS_ACCELERATED_PUTIMAGE(ctx)   HAS_VPP(ctx)
  92.  
  93. #if VA_CHECK_VERSION(0,33,0)
  94. /* Check whether we are rendering to X11 (VA/X11 or VA/GLX API) */
  95. #define IS_VA_X11(ctx) \
  96.     (((ctx)->display_type & VA_DISPLAY_MAJOR_MASK) == VA_DISPLAY_X11)
  97.  
  98. /* Check whether we are rendering to Wayland */
  99. #define IS_VA_WAYLAND(ctx) \
  100.     (((ctx)->display_type & VA_DISPLAY_MAJOR_MASK) == VA_DISPLAY_WAYLAND)
  101. #else
  102. /* Previous VA-API versions only supported VA/X11 (and VA/GLX) API */
  103. #define IS_VA_X11(ctx)          1
  104. #define IS_VA_WAYLAND(ctx)      0
  105. #endif
  106.  
  107. enum {
  108.     I965_SURFACETYPE_RGBA = 1,
  109.     I965_SURFACETYPE_YUV,
  110.     I965_SURFACETYPE_INDEXED
  111. };
  112.  
  113. /* List of supported display attributes */
  114. static const VADisplayAttribute i965_display_attributes[] = {
  115.     {
  116.         VADisplayAttribRotation,
  117.         0, 3, VA_ROTATION_NONE,
  118.         VA_DISPLAY_ATTRIB_GETTABLE|VA_DISPLAY_ATTRIB_SETTABLE
  119.     },
  120. };
  121.  
  122. /* List of supported image formats */
  123. typedef struct {
  124.     unsigned int        type;
  125.     VAImageFormat       va_format;
  126. } i965_image_format_map_t;
  127.  
  128. static const i965_image_format_map_t
  129. i965_image_formats_map[I965_MAX_IMAGE_FORMATS + 1] = {
  130.     { I965_SURFACETYPE_YUV,
  131.       { VA_FOURCC('Y','V','1','2'), VA_LSB_FIRST, 12, } },
  132.     { I965_SURFACETYPE_YUV,
  133.       { VA_FOURCC('I','4','2','0'), VA_LSB_FIRST, 12, } },
  134.     { I965_SURFACETYPE_YUV,
  135.       { VA_FOURCC('N','V','1','2'), VA_LSB_FIRST, 12, } },
  136. };
  137.  
  138. /* List of supported subpicture formats */
  139. typedef struct {
  140.     unsigned int        type;
  141.     unsigned int        format;
  142.     VAImageFormat       va_format;
  143.     unsigned int        va_flags;
  144. } i965_subpic_format_map_t;
  145.  
  146. #define COMMON_SUBPICTURE_FLAGS                 \
  147.     (VA_SUBPICTURE_DESTINATION_IS_SCREEN_COORD| \
  148.      VA_SUBPICTURE_GLOBAL_ALPHA)
  149.  
  150. static const i965_subpic_format_map_t
  151. i965_subpic_formats_map[I965_MAX_SUBPIC_FORMATS + 1] = {
  152.     { I965_SURFACETYPE_INDEXED, I965_SURFACEFORMAT_P4A4_UNORM,
  153.       { VA_FOURCC('I','A','4','4'), VA_MSB_FIRST, 8, },
  154.       COMMON_SUBPICTURE_FLAGS },
  155.     { I965_SURFACETYPE_INDEXED, I965_SURFACEFORMAT_A4P4_UNORM,
  156.       { VA_FOURCC('A','I','4','4'), VA_MSB_FIRST, 8, },
  157.       COMMON_SUBPICTURE_FLAGS },
  158.     { I965_SURFACETYPE_INDEXED, I965_SURFACEFORMAT_P8A8_UNORM,
  159.       { VA_FOURCC('I','A','8','8'), VA_MSB_FIRST, 16, },
  160.       COMMON_SUBPICTURE_FLAGS },
  161.     { I965_SURFACETYPE_INDEXED, I965_SURFACEFORMAT_A8P8_UNORM,
  162.       { VA_FOURCC('A','I','8','8'), VA_MSB_FIRST, 16, },
  163.       COMMON_SUBPICTURE_FLAGS },
  164.     { I965_SURFACETYPE_RGBA, I965_SURFACEFORMAT_B8G8R8A8_UNORM,
  165.       { VA_FOURCC('B','G','R','A'), VA_LSB_FIRST, 32,
  166.         32, 0x00ff0000, 0x0000ff00, 0x000000ff, 0xff000000 },
  167.       COMMON_SUBPICTURE_FLAGS },
  168.     { I965_SURFACETYPE_RGBA, I965_SURFACEFORMAT_R8G8B8A8_UNORM,
  169.       { VA_FOURCC('R','G','B','A'), VA_LSB_FIRST, 32,
  170.         32, 0x000000ff, 0x0000ff00, 0x00ff0000, 0xff000000 },
  171.       COMMON_SUBPICTURE_FLAGS },
  172. };
  173.  
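/*
 * Return the driver subpicture format entry matching va_format, or NULL
 * if the format is not supported.  For RGBA formats the byte order and
 * channel masks must match exactly; indexed formats are matched on the
 * FOURCC alone.
 */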
  174. static const i965_subpic_format_map_t *
  175. get_subpic_format(const VAImageFormat *va_format)
  176. {
  177.     unsigned int i;
  178.     for (i = 0; i965_subpic_formats_map[i].type != 0; i++) {
  179.         const i965_subpic_format_map_t * const m = &i965_subpic_formats_map[i];
  180.         if (m->va_format.fourcc == va_format->fourcc &&
  181.             (m->type == I965_SURFACETYPE_RGBA ?
  182.              (m->va_format.byte_order == va_format->byte_order &&
  183.               m->va_format.red_mask   == va_format->red_mask   &&
  184.               m->va_format.green_mask == va_format->green_mask &&
  185.               m->va_format.blue_mask  == va_format->blue_mask  &&
  186.               m->va_format.alpha_mask == va_format->alpha_mask) : 1))
  187.             return m;
  188.     }
  189.     return NULL;
  190. }
  191.  
  192. extern struct hw_context *g4x_dec_hw_context_init(VADriverContextP, VAProfile);
  193. static struct hw_codec_info g4x_hw_codec_info = {
  194.     .dec_hw_context_init = g4x_dec_hw_context_init,
  195.     .enc_hw_context_init = NULL,
  196.     .max_width = 2048,
  197.     .max_height = 2048,
  198. };
  199.  
  200. extern struct hw_context *ironlake_dec_hw_context_init(VADriverContextP, VAProfile);
  201. static struct hw_codec_info ironlake_hw_codec_info = {
  202.     .dec_hw_context_init = ironlake_dec_hw_context_init,
  203.     .enc_hw_context_init = NULL,
  204.     .max_width = 2048,
  205.     .max_height = 2048,
  206. };
  207.  
  208. extern struct hw_context *gen6_dec_hw_context_init(VADriverContextP, VAProfile);
  209. extern struct hw_context *gen6_enc_hw_context_init(VADriverContextP, VAProfile);
  210. static struct hw_codec_info gen6_hw_codec_info = {
  211.     .dec_hw_context_init = gen6_dec_hw_context_init,
  212.     .enc_hw_context_init = gen6_enc_hw_context_init,
  213.     .max_width = 2048,
  214.     .max_height = 2048,
  215. };
  216.  
  217. extern struct hw_context *gen7_dec_hw_context_init(VADriverContextP, VAProfile);
  218. static struct hw_codec_info gen7_hw_codec_info = {
  219.     .dec_hw_context_init = gen7_dec_hw_context_init,
  220.     .enc_hw_context_init = gen6_enc_hw_context_init,
  221.     .max_width = 4096,
  222.     .max_height = 4096,
  223. };
  224.  
  225. static struct hw_codec_info gen75_hw_codec_info = {
  226.     .dec_hw_context_init = gen75_dec_hw_context_init,
  227.     .enc_hw_context_init = gen75_enc_hw_context_init,
  228.     .max_width = 4096,
  229.     .max_height = 4096,
  230. };
  231.  
  232. VAStatus
  233. i965_QueryConfigProfiles(VADriverContextP ctx,
  234.                          VAProfile *profile_list,       /* out */
  235.                          int *num_profiles)             /* out */
  236. {
  237.     struct i965_driver_data * const i965 = i965_driver_data(ctx);
  238.     int i = 0;
  239.  
  240.     if (HAS_MPEG2(i965)) {
  241.         profile_list[i++] = VAProfileMPEG2Simple;
  242.         profile_list[i++] = VAProfileMPEG2Main;
  243.     }
  244.  
  245.     if (HAS_H264(i965)) {
  246.         profile_list[i++] = VAProfileH264Baseline;
  247.         profile_list[i++] = VAProfileH264Main;
  248.         profile_list[i++] = VAProfileH264High;
  249.     }
  250.  
  251.     if (HAS_VC1(i965)) {
  252.         profile_list[i++] = VAProfileVC1Simple;
  253.         profile_list[i++] = VAProfileVC1Main;
  254.         profile_list[i++] = VAProfileVC1Advanced;
  255.     }
  256.  
  257. #ifdef HAVE_VA_JPEG_DECODE
  258.     if (HAS_JPEG(i965)) {
  259.         profile_list[i++] = VAProfileJPEGBaseline;
  260.     }
  261. #endif
  262.  
  263.     /* If the assert fails then I965_MAX_PROFILES needs to be bigger */
  264.     assert(i <= I965_MAX_PROFILES);
  265.     *num_profiles = i;
  266.  
  267.     return VA_STATUS_SUCCESS;
  268. }
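/*
 * Illustrative client-side sketch (not part of this driver), assuming an
 * initialized VADisplay `dpy`: libva routes vaQueryConfigProfiles() to
 * the entry point above, and callers size the output array with
 * vaMaxNumProfiles().
 *
 *     VAProfile *profiles = calloc(vaMaxNumProfiles(dpy), sizeof(*profiles));
 *     int num_profiles = 0;
 *     vaQueryConfigProfiles(dpy, profiles, &num_profiles);
 */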
  269.  
  270. VAStatus
  271. i965_QueryConfigEntrypoints(VADriverContextP ctx,
  272.                             VAProfile profile,
  273.                             VAEntrypoint *entrypoint_list,      /* out */
  274.                             int *num_entrypoints)               /* out */
  275. {
  276.     struct i965_driver_data * const i965 = i965_driver_data(ctx);
  277.     int n = 0;
  278.  
  279.     printf("%s profile %d\n", __FUNCTION__, profile);
  280.     printf("devid %x gen6=%d has264=%d bsd=%d\n",
  281.             i965->intel.device_id, IS_GEN6(i965->intel.device_id),
  282.             HAS_H264(i965), i965->intel.has_bsd);
  283.    
  284.     switch (profile) {
  285.     case VAProfileMPEG2Simple:
  286.     case VAProfileMPEG2Main:
  287.         if (HAS_MPEG2(i965))
  288.             entrypoint_list[n++] = VAEntrypointVLD;
  289.         break;
  290.  
  291.     case VAProfileH264Baseline:
  292.     case VAProfileH264Main:
  293.     case VAProfileH264High:
  294.         if (HAS_H264(i965))
  295.             entrypoint_list[n++] = VAEntrypointVLD;
  296.  
  297.         if (HAS_ENCODER(i965))
  298.             entrypoint_list[n++] = VAEntrypointEncSlice;
  299.  
  300.         break;
  301.  
  302.     case VAProfileVC1Simple:
  303.     case VAProfileVC1Main:
  304.     case VAProfileVC1Advanced:
  305.         if (HAS_VC1(i965))
  306.             entrypoint_list[n++] = VAEntrypointVLD;
  307.         break;
  308.  
  309.     case VAProfileJPEGBaseline:
  310.         if (HAS_JPEG(i965))
  311.             entrypoint_list[n++] = VAEntrypointVLD;
  312.         break;
  313.  
  314.     default:
  315.         break;
  316.     }
  317.  
  318.     /* If the assert fails then I965_MAX_ENTRYPOINTS needs to be bigger */
  319.     assert(n <= I965_MAX_ENTRYPOINTS);
  320.     *num_entrypoints = n;
  321.    
  322.    
  323.     return n > 0 ? VA_STATUS_SUCCESS : VA_STATUS_ERROR_UNSUPPORTED_PROFILE;
  324. }
  325.  
  326. VAStatus
  327. i965_GetConfigAttributes(VADriverContextP ctx,
  328.                          VAProfile profile,
  329.                          VAEntrypoint entrypoint,
  330.                          VAConfigAttrib *attrib_list,  /* in/out */
  331.                          int num_attribs)
  332. {
  333.     int i;
  334.  
  335.     /* Other attributes don't seem to be defined */
  336.     /* What to do if we don't know the attribute? */
  337.     for (i = 0; i < num_attribs; i++) {
  338.         switch (attrib_list[i].type) {
  339.         case VAConfigAttribRTFormat:
  340.             attrib_list[i].value = VA_RT_FORMAT_YUV420;
  341.             break;
  342.  
  343.         case VAConfigAttribRateControl:
  344.             attrib_list[i].value = VA_RC_VBR;
  345.             break;
  346.  
  347.         default:
  348.             /* Do nothing */
  349.             attrib_list[i].value = VA_ATTRIB_NOT_SUPPORTED;
  350.             break;
  351.         }
  352.     }
  353.  
  354.     return VA_STATUS_SUCCESS;
  355. }
  356.  
  357. static void
  358. i965_destroy_config(struct object_heap *heap, struct object_base *obj)
  359. {
  360.     object_heap_free(heap, obj);
  361. }
  362.  
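/*
 * Overwrite the value of an attribute already present in obj_config, or
 * append it if there is still room; returns MAX_NUM_EXCEEDED once
 * I965_MAX_CONFIG_ATTRIBUTES entries are in use.
 */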
  363. static VAStatus
  364. i965_update_attribute(struct object_config *obj_config, VAConfigAttrib *attrib)
  365. {
  366.     int i;
  367.  
  368.     /* Check existing attributes */
  369.     for (i = 0; i < obj_config->num_attribs; i++) {
  370.         if (obj_config->attrib_list[i].type == attrib->type) {
  371.             /* Update existing attribute */
  372.             obj_config->attrib_list[i].value = attrib->value;
  373.             return VA_STATUS_SUCCESS;
  374.         }
  375.     }
  376.  
  377.     if (obj_config->num_attribs < I965_MAX_CONFIG_ATTRIBUTES) {
  378.         i = obj_config->num_attribs;
  379.         obj_config->attrib_list[i].type = attrib->type;
  380.         obj_config->attrib_list[i].value = attrib->value;
  381.         obj_config->num_attribs++;
  382.         return VA_STATUS_SUCCESS;
  383.     }
  384.  
  385.     return VA_STATUS_ERROR_MAX_NUM_EXCEEDED;
  386. }
  387.  
  388. VAStatus
  389. i965_CreateConfig(VADriverContextP ctx,
  390.                   VAProfile profile,
  391.                   VAEntrypoint entrypoint,
  392.                   VAConfigAttrib *attrib_list,
  393.                   int num_attribs,
  394.                   VAConfigID *config_id)        /* out */
  395. {
  396.     struct i965_driver_data * const i965 = i965_driver_data(ctx);
  397.     struct object_config *obj_config;
  398.     int configID;
  399.     int i;
  400.     VAStatus vaStatus;
  401.  
  402.     printf("%s, profile %d\n", __FUNCTION__, profile);
  403.    
  404.     /* Validate profile & entrypoint */
  405.     switch (profile) {
  406.     case VAProfileMPEG2Simple:
  407.     case VAProfileMPEG2Main:
  408.         if (HAS_MPEG2(i965) && VAEntrypointVLD == entrypoint) {
  409.             vaStatus = VA_STATUS_SUCCESS;
  410.         } else {
  411.             vaStatus = VA_STATUS_ERROR_UNSUPPORTED_ENTRYPOINT;
  412.         }
  413.         break;
  414.  
  415.     case VAProfileH264Baseline:
  416.     case VAProfileH264Main:
  417.     case VAProfileH264High:
  418.         if ((HAS_H264(i965) && VAEntrypointVLD == entrypoint) ||
  419.             (HAS_ENCODER(i965) && VAEntrypointEncSlice == entrypoint)) {
  420.             vaStatus = VA_STATUS_SUCCESS;
  421.         } else {
  422.             vaStatus = VA_STATUS_ERROR_UNSUPPORTED_ENTRYPOINT;
  423.         }
  424.  
  425.         break;
  426.  
  427.     case VAProfileVC1Simple:
  428.     case VAProfileVC1Main:
  429.     case VAProfileVC1Advanced:
  430.         if (HAS_VC1(i965) && VAEntrypointVLD == entrypoint) {
  431.             vaStatus = VA_STATUS_SUCCESS;
  432.         } else {
  433.             vaStatus = VA_STATUS_ERROR_UNSUPPORTED_ENTRYPOINT;
  434.         }
  435.  
  436.         break;
  437.  
  438.     case VAProfileJPEGBaseline:
  439.         if (HAS_JPEG(i965) && VAEntrypointVLD == entrypoint) {
  440.             vaStatus = VA_STATUS_SUCCESS;
  441.         } else {
  442.             vaStatus = VA_STATUS_ERROR_UNSUPPORTED_ENTRYPOINT;
  443.         }
  444.  
  445.         break;
  446.  
  447.     default:
  448.         vaStatus = VA_STATUS_ERROR_UNSUPPORTED_PROFILE;
  449.         break;
  450.     }
  451.  
  452.     if (VA_STATUS_SUCCESS != vaStatus) {
  453.         return vaStatus;
  454.     }
  455.  
  456.     configID = NEW_CONFIG_ID();
  457.     obj_config = CONFIG(configID);
  458.  
  459.     if (NULL == obj_config) {
  460.         vaStatus = VA_STATUS_ERROR_ALLOCATION_FAILED;
  461.         return vaStatus;
  462.     }
  463.  
  464.     obj_config->profile = profile;
  465.     obj_config->entrypoint = entrypoint;
  466.     obj_config->attrib_list[0].type = VAConfigAttribRTFormat;
  467.     obj_config->attrib_list[0].value = VA_RT_FORMAT_YUV420;
  468.     obj_config->num_attribs = 1;
  469.  
  470.     for(i = 0; i < num_attribs; i++) {
  471.         vaStatus = i965_update_attribute(obj_config, &(attrib_list[i]));
  472.  
  473.         if (VA_STATUS_SUCCESS != vaStatus) {
  474.             break;
  475.         }
  476.     }
  477.  
  478.     /* Error recovery */
  479.     if (VA_STATUS_SUCCESS != vaStatus) {
  480.         i965_destroy_config(&i965->config_heap, (struct object_base *)obj_config);
  481.     } else {
  482.         *config_id = configID;
  483.     }
  484.  
  485.     return vaStatus;
  486. }
  487.  
  488. VAStatus
  489. i965_DestroyConfig(VADriverContextP ctx, VAConfigID config_id)
  490. {
  491.     struct i965_driver_data *i965 = i965_driver_data(ctx);
  492.     struct object_config *obj_config = CONFIG(config_id);
  493.     VAStatus vaStatus;
  494.  
  495.     if (NULL == obj_config) {
  496.         vaStatus = VA_STATUS_ERROR_INVALID_CONFIG;
  497.         return vaStatus;
  498.     }
  499.  
  500.     i965_destroy_config(&i965->config_heap, (struct object_base *)obj_config);
  501.     return VA_STATUS_SUCCESS;
  502. }
  503.  
  504. VAStatus i965_QueryConfigAttributes(VADriverContextP ctx,
  505.                                     VAConfigID config_id,
  506.                                     VAProfile *profile,                 /* out */
  507.                                     VAEntrypoint *entrypoint,           /* out */
  508.                                     VAConfigAttrib *attrib_list,        /* out */
  509.                                     int *num_attribs)                   /* out */
  510. {
  511.     struct i965_driver_data *i965 = i965_driver_data(ctx);
  512.     struct object_config *obj_config = CONFIG(config_id);
  513.     VAStatus vaStatus = VA_STATUS_SUCCESS;
  514.     int i;
  515.  
  516.     assert(obj_config);
  517.     *profile = obj_config->profile;
  518.     *entrypoint = obj_config->entrypoint;
  519.     *num_attribs = obj_config->num_attribs;
  520.  
  521.     for(i = 0; i < obj_config->num_attribs; i++) {
  522.         attrib_list[i] = obj_config->attrib_list[i];
  523.     }
  524.  
  525.     return vaStatus;
  526. }
  527.  
  528. static void
  529. i965_destroy_surface(struct object_heap *heap, struct object_base *obj)
  530. {
  531.     struct object_surface *obj_surface = (struct object_surface *)obj;
  532.  
  533.     dri_bo_unreference(obj_surface->bo);
  534.     obj_surface->bo = NULL;
  535.  
  536.     if (obj_surface->free_private_data != NULL) {
  537.         obj_surface->free_private_data(&obj_surface->private_data);
  538.         obj_surface->private_data = NULL;
  539.     }
  540.  
  541.     object_heap_free(heap, obj);
  542. }
  543.  
  544. VAStatus
  545. i965_CreateSurfaces(VADriverContextP ctx,
  546.                     int width,
  547.                     int height,
  548.                     int format,
  549.                     int num_surfaces,
  550.                     VASurfaceID *surfaces)      /* out */
  551. {
  552.     struct i965_driver_data *i965 = i965_driver_data(ctx);
  553.     int i,j;
  554.     VAStatus vaStatus = VA_STATUS_SUCCESS;
  555.  
  556.     /* We only support one format */
  557.     if (VA_RT_FORMAT_YUV420 != format) {
  558.         return VA_STATUS_ERROR_UNSUPPORTED_RT_FORMAT;
  559.     }
  560.  
  561.     for (i = 0; i < num_surfaces; i++) {
  562.         int surfaceID = NEW_SURFACE_ID();
  563.         struct object_surface *obj_surface = SURFACE(surfaceID);
  564.  
  565.         if (NULL == obj_surface) {
  566.             vaStatus = VA_STATUS_ERROR_ALLOCATION_FAILED;
  567.             break;
  568.         }
  569.  
  570.         surfaces[i] = surfaceID;
  571.         obj_surface->status = VASurfaceReady;
  572.         obj_surface->orig_width = width;
  573.         obj_surface->orig_height = height;
  574.  
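        /*
         * Surface padding: G4X and Ironlake round both dimensions up to
         * a multiple of 16; GEN6/GEN7 pad the width to 128 and the
         * height to 32.
         */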
  575.         if (IS_G4X(i965->intel.device_id) || IS_IRONLAKE(i965->intel.device_id)) {
  576.                 obj_surface->width = ALIGN(width, 16);
  577.                 obj_surface->height = ALIGN(height, 16);
  578.         } else {
  579.                 obj_surface->width = ALIGN(width, 128);
  580.                 obj_surface->height = ALIGN(height, 32);
  581.         }
  582.  
  583.         obj_surface->subpic_render_idx = 0;
  584.         for(j = 0; j < I965_MAX_SUBPIC_SUM; j++){
  585.            obj_surface->subpic[j] = VA_INVALID_ID;
  586.         }
  587.  
  588.         obj_surface->flags = SURFACE_REFERENCED;
  589.         obj_surface->fourcc = 0;
  590.         obj_surface->bo = NULL;
  591.         obj_surface->locked_image_id = VA_INVALID_ID;
  592.         obj_surface->private_data = NULL;
  593.         obj_surface->free_private_data = NULL;
  594.         obj_surface->subsampling = SUBSAMPLE_YUV420;
  595.     }
  596.  
  597.     /* Error recovery */
  598.     if (VA_STATUS_SUCCESS != vaStatus) {
  599.         /* surfaces[i-1] was the last successful allocation */
  600.         for (; i--; ) {
  601.             struct object_surface *obj_surface = SURFACE(surfaces[i]);
  602.  
  603.             surfaces[i] = VA_INVALID_SURFACE;
  604.             assert(obj_surface);
  605.             i965_destroy_surface(&i965->surface_heap, (struct object_base *)obj_surface);
  606.         }
  607.     }
  608.  
  609.     return vaStatus;
  610. }
  611.  
  612. VAStatus
  613. i965_DestroySurfaces(VADriverContextP ctx,
  614.                      VASurfaceID *surface_list,
  615.                      int num_surfaces)
  616. {
  617.     struct i965_driver_data *i965 = i965_driver_data(ctx);
  618.     int i;
  619.  
  620.     for (i = num_surfaces; i--; ) {
  621.         struct object_surface *obj_surface = SURFACE(surface_list[i]);
  622.  
  623.         assert(obj_surface);
  624.         i965_destroy_surface(&i965->surface_heap, (struct object_base *)obj_surface);
  625.     }
  626.  
  627.     return VA_STATUS_SUCCESS;
  628. }
  629.  
  630. VAStatus
  631. i965_QueryImageFormats(VADriverContextP ctx,
  632.                        VAImageFormat *format_list,      /* out */
  633.                        int *num_formats)                /* out */
  634. {
  635.     int n;
  636.  
  637.     for (n = 0; i965_image_formats_map[n].va_format.fourcc != 0; n++) {
  638.         const i965_image_format_map_t * const m = &i965_image_formats_map[n];
  639.         if (format_list)
  640.             format_list[n] = m->va_format;
  641.     }
  642.  
  643.     if (num_formats)
  644.         *num_formats = n;
  645.  
  646.     return VA_STATUS_SUCCESS;
  647. }
  648.  
  649. VAStatus
  650. i965_PutImage(VADriverContextP ctx,
  651.               VASurfaceID surface,
  652.               VAImageID image,
  653.               int src_x,
  654.               int src_y,
  655.               unsigned int src_width,
  656.               unsigned int src_height,
  657.               int dest_x,
  658.               int dest_y,
  659.               unsigned int dest_width,
  660.               unsigned int dest_height)
  661. {
  662.     return VA_STATUS_SUCCESS;
  663. }
  664.  
  665. VAStatus
  666. i965_QuerySubpictureFormats(VADriverContextP ctx,
  667.                             VAImageFormat *format_list,         /* out */
  668.                             unsigned int *flags,                /* out */
  669.                             unsigned int *num_formats)          /* out */
  670. {
  671.     int n;
  672.  
  673.     for (n = 0; i965_subpic_formats_map[n].va_format.fourcc != 0; n++) {
  674.         const i965_subpic_format_map_t * const m = &i965_subpic_formats_map[n];
  675.         if (format_list)
  676.             format_list[n] = m->va_format;
  677.         if (flags)
  678.             flags[n] = m->va_flags;
  679.     }
  680.  
  681.     if (num_formats)
  682.         *num_formats = n;
  683.  
  684.     return VA_STATUS_SUCCESS;
  685. }
  686.  
  687. static void
  688. i965_destroy_subpic(struct object_heap *heap, struct object_base *obj)
  689. {
  690.     //    struct object_subpic *obj_subpic = (struct object_subpic *)obj;
  691.  
  692.     object_heap_free(heap, obj);
  693. }
  694.  
  695. VAStatus
  696. i965_CreateSubpicture(VADriverContextP ctx,
  697.                       VAImageID image,
  698.                       VASubpictureID *subpicture)         /* out */
  699. {
  700.     struct i965_driver_data *i965 = i965_driver_data(ctx);
  701.     VASubpictureID subpicID = NEW_SUBPIC_ID();
  702.     struct object_subpic *obj_subpic = SUBPIC(subpicID);
  703.  
  704.     if (!obj_subpic)
  705.         return VA_STATUS_ERROR_ALLOCATION_FAILED;
  706.  
  707.     struct object_image *obj_image = IMAGE(image);
  708.     if (!obj_image)
  709.         return VA_STATUS_ERROR_INVALID_IMAGE;
  710.  
  711.     const i965_subpic_format_map_t * const m = get_subpic_format(&obj_image->image.format);
  712.     if (!m)
  713.         return VA_STATUS_ERROR_UNKNOWN; /* XXX: VA_STATUS_ERROR_UNSUPPORTED_FORMAT? */
  714.  
  715.     *subpicture = subpicID;
  716.     obj_subpic->image  = image;
  717.     obj_subpic->format = m->format;
  718.     obj_subpic->width  = obj_image->image.width;
  719.     obj_subpic->height = obj_image->image.height;
  720.     obj_subpic->pitch  = obj_image->image.pitches[0];
  721.     obj_subpic->bo     = obj_image->bo;
  722.     obj_subpic->global_alpha = 1.0;
  723.  
  724.     return VA_STATUS_SUCCESS;
  725. }
  726.  
  727. VAStatus
  728. i965_DestroySubpicture(VADriverContextP ctx,
  729.                        VASubpictureID subpicture)
  730. {
  731.     struct i965_driver_data *i965 = i965_driver_data(ctx);
  732.     struct object_subpic *obj_subpic = SUBPIC(subpicture);
  733.     i965_destroy_subpic(&i965->subpic_heap, (struct object_base *)obj_subpic);
  734.     return VA_STATUS_SUCCESS;
  735. }
  736.  
  737. VAStatus
  738. i965_SetSubpictureImage(VADriverContextP ctx,
  739.                         VASubpictureID subpicture,
  740.                         VAImageID image)
  741. {
  742.     /* TODO */
  743.     return VA_STATUS_ERROR_UNIMPLEMENTED;
  744. }
  745.  
  746. VAStatus
  747. i965_SetSubpictureChromakey(VADriverContextP ctx,
  748.                             VASubpictureID subpicture,
  749.                             unsigned int chromakey_min,
  750.                             unsigned int chromakey_max,
  751.                             unsigned int chromakey_mask)
  752. {
  753.     /* TODO */
  754.     return VA_STATUS_ERROR_UNIMPLEMENTED;
  755. }
  756.  
  757. VAStatus
  758. i965_SetSubpictureGlobalAlpha(VADriverContextP ctx,
  759.                               VASubpictureID subpicture,
  760.                               float global_alpha)
  761. {
  762.     struct i965_driver_data *i965 = i965_driver_data(ctx);
  763.     struct object_subpic *obj_subpic = SUBPIC(subpicture);
  764.  
  765.     if(global_alpha > 1.0 || global_alpha < 0.0){
  766.        return VA_STATUS_ERROR_INVALID_PARAMETER;
  767.     }
  768.     obj_subpic->global_alpha  = global_alpha;
  769.  
  770.     return VA_STATUS_SUCCESS;
  771. }
  772.  
  773. VAStatus
  774. i965_AssociateSubpicture(VADriverContextP ctx,
  775.                          VASubpictureID subpicture,
  776.                          VASurfaceID *target_surfaces,
  777.                          int num_surfaces,
  778.                          short src_x, /* upper left offset in subpicture */
  779.                          short src_y,
  780.                          unsigned short src_width,
  781.                          unsigned short src_height,
  782.                          short dest_x, /* upper left offset in surface */
  783.                          short dest_y,
  784.                          unsigned short dest_width,
  785.                          unsigned short dest_height,
  786.                          /*
  787.                           * whether to enable chroma-keying or global-alpha
  788.                           * see VA_SUBPICTURE_XXX values
  789.                           */
  790.                          unsigned int flags)
  791. {
  792.     struct i965_driver_data *i965 = i965_driver_data(ctx);
  793.     struct object_subpic *obj_subpic = SUBPIC(subpicture);
  794.     int i, j;
  795.  
  796.     obj_subpic->src_rect.x      = src_x;
  797.     obj_subpic->src_rect.y      = src_y;
  798.     obj_subpic->src_rect.width  = src_width;
  799.     obj_subpic->src_rect.height = src_height;
  800.     obj_subpic->dst_rect.x      = dest_x;
  801.     obj_subpic->dst_rect.y      = dest_y;
  802.     obj_subpic->dst_rect.width  = dest_width;
  803.     obj_subpic->dst_rect.height = dest_height;
  804.     obj_subpic->flags           = flags;
  805.  
  806.     for (i = 0; i < num_surfaces; i++) {
  807.         struct object_surface *obj_surface = SURFACE(target_surfaces[i]);
  808.         if (!obj_surface)
  809.             return VA_STATUS_ERROR_INVALID_SURFACE;
  810.  
  811.         for(j = 0; j < I965_MAX_SUBPIC_SUM; j ++){
  812.             if(obj_surface->subpic[j] == VA_INVALID_ID){
  813.                obj_surface->subpic[j] = subpicture;
  814.                break;
  815.             }
  816.         }
  817.  
  818.         if(j == I965_MAX_SUBPIC_SUM){
  819.             return VA_STATUS_ERROR_MAX_NUM_EXCEEDED;
  820.         }
  821.  
  822.     }
  823.     return VA_STATUS_SUCCESS;
  824. }
  825.  
  826.  
  827. VAStatus
  828. i965_DeassociateSubpicture(VADriverContextP ctx,
  829.                            VASubpictureID subpicture,
  830.                            VASurfaceID *target_surfaces,
  831.                            int num_surfaces)
  832. {
  833.     struct i965_driver_data *i965 = i965_driver_data(ctx);
  834.     int i, j;
  835.  
  836.     for (i = 0; i < num_surfaces; i++) {
  837.         struct object_surface *obj_surface = SURFACE(target_surfaces[i]);
  838.         if (!obj_surface)
  839.             return VA_STATUS_ERROR_INVALID_SURFACE;
  840.  
  841.         for(j = 0; j < I965_MAX_SUBPIC_SUM; j ++){
  842.             if(obj_surface->subpic[j] == subpicture){
  843.                obj_surface->subpic[j] = VA_INVALID_ID;
  844.                break;
  845.             }
  846.         }
  847.  
  848.         if(j == I965_MAX_SUBPIC_SUM){
  849.             return VA_STATUS_ERROR_MAX_NUM_EXCEEDED;
  850.         }
  851.     }
  852.     return VA_STATUS_SUCCESS;
  853. }
  854.  
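/*
 * buffer_store objects are reference counted: i965_reference_buffer_store()
 * stores a new reference into *ptr, and i965_release_buffer_store() drops
 * one, freeing the backing bo or malloc'd buffer when the count reaches
 * zero.
 */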
  855. void
  856. i965_reference_buffer_store(struct buffer_store **ptr,
  857.                             struct buffer_store *buffer_store)
  858. {
  859.     assert(*ptr == NULL);
  860.  
  861.     if (buffer_store) {
  862.         buffer_store->ref_count++;
  863.         *ptr = buffer_store;
  864.     }
  865. }
  866.  
  867. void
  868. i965_release_buffer_store(struct buffer_store **ptr)
  869. {
  870.     struct buffer_store *buffer_store = *ptr;
  871.  
  872.     if (buffer_store == NULL)
  873.         return;
  874.  
  875.     assert(buffer_store->bo || buffer_store->buffer);
  876.     assert(!(buffer_store->bo && buffer_store->buffer));
  877.     buffer_store->ref_count--;
  878.  
  879.     if (buffer_store->ref_count == 0) {
  880.         dri_bo_unreference(buffer_store->bo);
  881.         free(buffer_store->buffer);
  882.         buffer_store->bo = NULL;
  883.         buffer_store->buffer = NULL;
  884.         free(buffer_store);
  885.     }
  886.  
  887.     *ptr = NULL;
  888. }
  889.  
  890. static void
  891. i965_destroy_context(struct object_heap *heap, struct object_base *obj)
  892. {
  893.     struct object_context *obj_context = (struct object_context *)obj;
  894.     int i;
  895.  
  896.     if (obj_context->hw_context) {
  897.         obj_context->hw_context->destroy(obj_context->hw_context);
  898.         obj_context->hw_context = NULL;
  899.     }
  900.  
  901.     if (obj_context->codec_type == CODEC_ENC) {
  902.         assert(obj_context->codec_state.encode.num_slice_params <= obj_context->codec_state.encode.max_slice_params);
  903.         i965_release_buffer_store(&obj_context->codec_state.encode.pic_param);
  904.         i965_release_buffer_store(&obj_context->codec_state.encode.seq_param);
  905.  
  906.         for (i = 0; i < obj_context->codec_state.encode.num_slice_params; i++)
  907.             i965_release_buffer_store(&obj_context->codec_state.encode.slice_params[i]);
  908.  
  909.         free(obj_context->codec_state.encode.slice_params);
  910.     } else {
  911.         assert(obj_context->codec_state.decode.num_slice_params <= obj_context->codec_state.decode.max_slice_params);
  912.         assert(obj_context->codec_state.decode.num_slice_datas <= obj_context->codec_state.decode.max_slice_datas);
  913.  
  914.         i965_release_buffer_store(&obj_context->codec_state.decode.pic_param);
  915.         i965_release_buffer_store(&obj_context->codec_state.decode.iq_matrix);
  916.         i965_release_buffer_store(&obj_context->codec_state.decode.bit_plane);
  917.  
  918.         for (i = 0; i < obj_context->codec_state.decode.num_slice_params; i++)
  919.             i965_release_buffer_store(&obj_context->codec_state.decode.slice_params[i]);
  920.  
  921.         for (i = 0; i < obj_context->codec_state.decode.num_slice_datas; i++)
  922.             i965_release_buffer_store(&obj_context->codec_state.decode.slice_datas[i]);
  923.  
  924.         free(obj_context->codec_state.decode.slice_params);
  925.         free(obj_context->codec_state.decode.slice_datas);
  926.     }
  927.  
  928.     free(obj_context->render_targets);
  929.     object_heap_free(heap, obj);
  930. }
  931.  
  932. VAStatus
  933. i965_CreateContext(VADriverContextP ctx,
  934.                    VAConfigID config_id,
  935.                    int picture_width,
  936.                    int picture_height,
  937.                    int flag,
  938.                    VASurfaceID *render_targets,
  939.                    int num_render_targets,
  940.                    VAContextID *context)                /* out */
  941. {
  942.     struct i965_driver_data *i965 = i965_driver_data(ctx);
  943.     struct i965_render_state *render_state = &i965->render_state;
  944.     struct object_config *obj_config = CONFIG(config_id);
  945.     struct object_context *obj_context = NULL;
  946.     VAStatus vaStatus = VA_STATUS_SUCCESS;
  947.     int contextID;
  948.     int i;
  949.  
  950.     if (NULL == obj_config) {
  951.         vaStatus = VA_STATUS_ERROR_INVALID_CONFIG;
  952.         return vaStatus;
  953.     }
  954.  
  955.     if (picture_width > i965->codec_info->max_width ||
  956.         picture_height > i965->codec_info->max_height) {
  957.         vaStatus = VA_STATUS_ERROR_RESOLUTION_NOT_SUPPORTED;
  958.         return vaStatus;
  959.     }
  960.  
  961.     /* Validate flag */
  962.     /* Validate picture dimensions */
  963.     contextID = NEW_CONTEXT_ID();
  964.     obj_context = CONTEXT(contextID);
  965.  
  966.     if (NULL == obj_context) {
  967.         vaStatus = VA_STATUS_ERROR_ALLOCATION_FAILED;
  968.         return vaStatus;
  969.     }
  970.  
  971.     render_state->inited = 1;
  972.  
  973.     switch (obj_config->profile) {
  974.     case VAProfileH264Baseline:
  975.     case VAProfileH264Main:
  976.     case VAProfileH264High:
  977.         if (!HAS_H264(i965))
  978.             return VA_STATUS_ERROR_UNSUPPORTED_PROFILE;
  979.         render_state->interleaved_uv = 1;
  980.         break;
  981.     default:
  982.         render_state->interleaved_uv = !!(IS_GEN6(i965->intel.device_id) || IS_GEN7(i965->intel.device_id));
  983.         break;
  984.     }
  985.  
  986.     *context = contextID;
  987.     obj_context->flags = flag;
  988.     obj_context->context_id = contextID;
  989.     obj_context->config_id = config_id;
  990.     obj_context->picture_width = picture_width;
  991.     obj_context->picture_height = picture_height;
  992.     obj_context->num_render_targets = num_render_targets;
  993.     obj_context->render_targets =
  994.         (VASurfaceID *)calloc(num_render_targets, sizeof(VASurfaceID));
  995.     obj_context->hw_context = NULL;
  996.  
  997.     for(i = 0; i < num_render_targets; i++) {
  998.         if (NULL == SURFACE(render_targets[i])) {
  999.             vaStatus = VA_STATUS_ERROR_INVALID_SURFACE;
  1000.             break;
  1001.         }
  1002.  
  1003.         obj_context->render_targets[i] = render_targets[i];
  1004.     }
  1005.  
  1006.     if (VA_STATUS_SUCCESS == vaStatus) {
  1007.         if (VAEntrypointEncSlice == obj_config->entrypoint) { /* encode routine only */
  1008.             obj_context->codec_type = CODEC_ENC;
  1009.             memset(&obj_context->codec_state.encode, 0, sizeof(obj_context->codec_state.encode));
  1010.             obj_context->codec_state.encode.current_render_target = VA_INVALID_ID;
  1011.             obj_context->codec_state.encode.max_slice_params = NUM_SLICES;
  1012.             obj_context->codec_state.encode.slice_params = calloc(obj_context->codec_state.encode.max_slice_params,
  1013.                                                                sizeof(*obj_context->codec_state.encode.slice_params));
  1014.             assert(i965->codec_info->enc_hw_context_init);
  1015.             obj_context->hw_context = i965->codec_info->enc_hw_context_init(ctx, obj_config->profile);
  1016.         } else {
  1017.             obj_context->codec_type = CODEC_DEC;
  1018.             memset(&obj_context->codec_state.decode, 0, sizeof(obj_context->codec_state.decode));
  1019.             obj_context->codec_state.decode.current_render_target = -1;
  1020.             obj_context->codec_state.decode.max_slice_params = NUM_SLICES;
  1021.             obj_context->codec_state.decode.max_slice_datas = NUM_SLICES;
  1022.             obj_context->codec_state.decode.slice_params = calloc(obj_context->codec_state.decode.max_slice_params,
  1023.                                                                sizeof(*obj_context->codec_state.decode.slice_params));
  1024.             obj_context->codec_state.decode.slice_datas = calloc(obj_context->codec_state.decode.max_slice_datas,
  1025.                                                               sizeof(*obj_context->codec_state.decode.slice_datas));
  1026.  
  1027.             assert(i965->codec_info->dec_hw_context_init);
  1028.             obj_context->hw_context = i965->codec_info->dec_hw_context_init(ctx, obj_config->profile);
  1029.         }
  1030.     }
  1031.  
  1032.     /* Error recovery */
  1033.     if (VA_STATUS_SUCCESS != vaStatus) {
  1034.         i965_destroy_context(&i965->context_heap, (struct object_base *)obj_context);
  1035.     }
  1036.  
  1037.     return vaStatus;
  1038. }
  1039.  
  1040. VAStatus
  1041. i965_DestroyContext(VADriverContextP ctx, VAContextID context)
  1042. {
  1043.     struct i965_driver_data *i965 = i965_driver_data(ctx);
  1044.     struct object_context *obj_context = CONTEXT(context);
  1045.  
  1046.     assert(obj_context);
  1047.     i965_destroy_context(&i965->context_heap, (struct object_base *)obj_context);
  1048.  
  1049.     return VA_STATUS_SUCCESS;
  1050. }
  1051.  
  1052. static void
  1053. i965_destroy_buffer(struct object_heap *heap, struct object_base *obj)
  1054. {
  1055.     struct object_buffer *obj_buffer = (struct object_buffer *)obj;
  1056.  
  1057.     assert(obj_buffer->buffer_store);
  1058.     i965_release_buffer_store(&obj_buffer->buffer_store);
  1059.     object_heap_free(heap, obj);
  1060. }
  1061.  
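/*
 * Common buffer creation path.  Parameter-style buffers are kept in
 * malloc'd memory; slice data, image and coded buffers are backed by a
 * GEM buffer object.  Coded buffers additionally reserve an aligned
 * VACodedBufferSegment header at the start of the bo.
 */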
  1062. static VAStatus
  1063. i965_create_buffer_internal(VADriverContextP ctx,
  1064.                             VAContextID context,
  1065.                             VABufferType type,
  1066.                             unsigned int size,
  1067.                             unsigned int num_elements,
  1068.                             void *data,
  1069.                             dri_bo *store_bo,
  1070.                             VABufferID *buf_id)
  1071. {
  1072.     struct i965_driver_data *i965 = i965_driver_data(ctx);
  1073.     struct object_buffer *obj_buffer = NULL;
  1074.     struct buffer_store *buffer_store = NULL;
  1075.     int bufferID;
  1076.  
  1077.     /* Validate type */
  1078.     switch (type) {
  1079.     case VAPictureParameterBufferType:
  1080.     case VAIQMatrixBufferType:
  1081.     case VABitPlaneBufferType:
  1082.     case VASliceGroupMapBufferType:
  1083.     case VASliceParameterBufferType:
  1084.     case VASliceDataBufferType:
  1085.     case VAMacroblockParameterBufferType:
  1086.     case VAResidualDataBufferType:
  1087.     case VADeblockingParameterBufferType:
  1088.     case VAImageBufferType:
  1089.     case VAEncCodedBufferType:
  1090.     case VAEncSequenceParameterBufferType:
  1091.     case VAEncPictureParameterBufferType:
  1092.     case VAEncSliceParameterBufferType:
  1093. #ifdef HAVE_VA_JPEG_DECODE
  1094.      case VAHuffmanTableBufferType:
  1095. #endif
  1096.         /* Ok */
  1097.         break;
  1098.  
  1099.     default:
  1100.         return VA_STATUS_ERROR_UNSUPPORTED_BUFFERTYPE;
  1101.     }
  1102.  
  1103.     bufferID = NEW_BUFFER_ID();
  1104.     obj_buffer = BUFFER(bufferID);
  1105.  
  1106.     if (NULL == obj_buffer) {
  1107.         return VA_STATUS_ERROR_ALLOCATION_FAILED;
  1108.     }
  1109.  
  1110.     if (type == VAEncCodedBufferType) {
  1111.         size += ALIGN(sizeof(VACodedBufferSegment), 64);
  1112.     }
  1113.  
  1114.     obj_buffer->max_num_elements = num_elements;
  1115.     obj_buffer->num_elements = num_elements;
  1116.     obj_buffer->size_element = size;
  1117.     obj_buffer->type = type;
  1118.     obj_buffer->buffer_store = NULL;
  1119.     buffer_store = calloc(1, sizeof(struct buffer_store));
  1120.     assert(buffer_store);
  1121.     buffer_store->ref_count = 1;
  1122.  
  1123.     if (store_bo != NULL) {
  1124.         buffer_store->bo = store_bo;
  1125.         dri_bo_reference(buffer_store->bo);
  1126.  
  1127.         if (data)
  1128.             dri_bo_subdata(buffer_store->bo, 0, size * num_elements, data);
  1129.     } else if (type == VASliceDataBufferType || type == VAImageBufferType || type == VAEncCodedBufferType) {
  1130.         buffer_store->bo = dri_bo_alloc(i965->intel.bufmgr,
  1131.                                         "Buffer",
  1132.                                         size * num_elements, 64);
  1133.         assert(buffer_store->bo);
  1134.  
  1135.         if (type == VAEncCodedBufferType) {
  1136.             VACodedBufferSegment *coded_buffer_segment;
  1137.             dri_bo_map(buffer_store->bo, 1);
  1138.             coded_buffer_segment = (VACodedBufferSegment *)buffer_store->bo->virtual;
  1139.             coded_buffer_segment->size = size - ALIGN(sizeof(VACodedBufferSegment), 64);
  1140.             coded_buffer_segment->bit_offset = 0;
  1141.             coded_buffer_segment->status = 0;
  1142.             coded_buffer_segment->buf = NULL;
  1143.             coded_buffer_segment->next = NULL;
  1144.             dri_bo_unmap(buffer_store->bo);
  1145.         } else if (data) {
  1146.             dri_bo_subdata(buffer_store->bo, 0, size * num_elements, data);
  1147.         }
  1148.  
  1149.     } else {
  1150.         buffer_store->buffer = malloc(size * num_elements);
  1151.         assert(buffer_store->buffer);
  1152.  
  1153.         if (data)
  1154.             memcpy(buffer_store->buffer, data, size * num_elements);
  1155.     }
  1156.  
  1157.     buffer_store->num_elements = obj_buffer->num_elements;
  1158.     i965_reference_buffer_store(&obj_buffer->buffer_store, buffer_store);
  1159.     i965_release_buffer_store(&buffer_store);
  1160.     *buf_id = bufferID;
  1161.  
  1162.     return VA_STATUS_SUCCESS;
  1163. }
  1164.  
  1165. VAStatus
  1166. i965_CreateBuffer(VADriverContextP ctx,
  1167.                   VAContextID context,          /* in */
  1168.                   VABufferType type,            /* in */
  1169.                   unsigned int size,            /* in */
  1170.                   unsigned int num_elements,    /* in */
  1171.                   void *data,                   /* in */
  1172.                   VABufferID *buf_id)           /* out */
  1173. {
  1174.     return i965_create_buffer_internal(ctx, context, type, size, num_elements, data, NULL, buf_id);
  1175. }
  1176.  
  1177.  
  1178. VAStatus
  1179. i965_BufferSetNumElements(VADriverContextP ctx,
  1180.                           VABufferID buf_id,           /* in */
  1181.                           unsigned int num_elements)   /* in */
  1182. {
  1183.     struct i965_driver_data *i965 = i965_driver_data(ctx);
  1184.     struct object_buffer *obj_buffer = BUFFER(buf_id);
  1185.     VAStatus vaStatus = VA_STATUS_SUCCESS;
  1186.  
  1187.     assert(obj_buffer);
  1188.  
  1189.     /* num_elements is unsigned, so only the upper bound can be violated */
  1190.     if (num_elements > obj_buffer->max_num_elements) {
  1191.         vaStatus = VA_STATUS_ERROR_UNKNOWN;
  1192.     } else {
  1193.         obj_buffer->num_elements = num_elements;
  1194.         if (obj_buffer->buffer_store != NULL) {
  1195.             obj_buffer->buffer_store->num_elements = num_elements;
  1196.         }
  1197.     }
  1198.  
  1199.     return vaStatus;
  1200. }
  1201.  
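/*
 * Map a buffer for CPU access.  Tiled buffer objects are mapped through
 * the GTT, linear ones with dri_bo_map(), and malloc-backed buffers are
 * returned directly.  For coded buffers the mapping starts at the
 * VACodedBufferSegment header, whose buf pointer is fixed up to point
 * just past the aligned header.
 */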
  1202. VAStatus
  1203. i965_MapBuffer(VADriverContextP ctx,
  1204.                VABufferID buf_id,       /* in */
  1205.                void **pbuf)             /* out */
  1206. {
  1207.     struct i965_driver_data *i965 = i965_driver_data(ctx);
  1208.     struct object_buffer *obj_buffer = BUFFER(buf_id);
  1209.     VAStatus vaStatus = VA_STATUS_ERROR_UNKNOWN;
  1210.  
  1211.     assert(obj_buffer && obj_buffer->buffer_store);
  1212.     assert(obj_buffer->buffer_store->bo || obj_buffer->buffer_store->buffer);
  1213.     assert(!(obj_buffer->buffer_store->bo && obj_buffer->buffer_store->buffer));
  1214.  
  1215.     if (NULL != obj_buffer->buffer_store->bo) {
  1216.         unsigned int tiling, swizzle;
  1217.  
  1218.         dri_bo_get_tiling(obj_buffer->buffer_store->bo, &tiling, &swizzle);
  1219.  
  1220.         if (tiling != I915_TILING_NONE)
  1221.             drm_intel_gem_bo_map_gtt(obj_buffer->buffer_store->bo);
  1222.         else
  1223.             dri_bo_map(obj_buffer->buffer_store->bo, 1);
  1224.  
  1225.         assert(obj_buffer->buffer_store->bo->virtual);
  1226.         *pbuf = obj_buffer->buffer_store->bo->virtual;
  1227.  
  1228.         if (obj_buffer->type == VAEncCodedBufferType) {
  1229.             VACodedBufferSegment *coded_buffer_segment = (VACodedBufferSegment *)(obj_buffer->buffer_store->bo->virtual);
  1230.             coded_buffer_segment->buf = (unsigned char *)(obj_buffer->buffer_store->bo->virtual) + ALIGN(sizeof(VACodedBufferSegment), 64);
  1231.         }
  1232.  
  1233.         vaStatus = VA_STATUS_SUCCESS;
  1234.     } else if (NULL != obj_buffer->buffer_store->buffer) {
  1235.         *pbuf = obj_buffer->buffer_store->buffer;
  1236.         vaStatus = VA_STATUS_SUCCESS;
  1237.     }
  1238.  
  1239.     return vaStatus;
  1240. }
  1241.  
  1242. VAStatus
  1243. i965_UnmapBuffer(VADriverContextP ctx, VABufferID buf_id)
  1244. {
  1245.     struct i965_driver_data *i965 = i965_driver_data(ctx);
  1246.     struct object_buffer *obj_buffer = BUFFER(buf_id);
  1247.     VAStatus vaStatus = VA_STATUS_ERROR_UNKNOWN;
  1248.  
  1249.     assert(obj_buffer && obj_buffer->buffer_store);
  1250.     assert(obj_buffer->buffer_store->bo || obj_buffer->buffer_store->buffer);
  1251.     assert(!(obj_buffer->buffer_store->bo && obj_buffer->buffer_store->buffer));
  1252.  
  1253.     if (NULL != obj_buffer->buffer_store->bo) {
  1254.         unsigned int tiling, swizzle;
  1255.  
  1256.         dri_bo_get_tiling(obj_buffer->buffer_store->bo, &tiling, &swizzle);
  1257.  
  1258.         if (tiling != I915_TILING_NONE)
  1259.             drm_intel_gem_bo_unmap_gtt(obj_buffer->buffer_store->bo);
  1260.         else
  1261.             dri_bo_unmap(obj_buffer->buffer_store->bo);
  1262.  
  1263.         vaStatus = VA_STATUS_SUCCESS;
  1264.     } else if (NULL != obj_buffer->buffer_store->buffer) {
  1265.         /* Do nothing */
  1266.         vaStatus = VA_STATUS_SUCCESS;
  1267.     }
  1268.  
  1269.     return vaStatus;
  1270. }
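/*
 * Illustrative client-side sketch (not part of this driver), assuming a
 * coded buffer `coded_buf` produced by an encode context on VADisplay
 * `dpy` and a hypothetical output file `out_fp`: the mapped pointer is
 * the VACodedBufferSegment list initialized in i965_create_buffer_internal().
 *
 *     VACodedBufferSegment *seg = NULL;
 *     vaMapBuffer(dpy, coded_buf, (void **)&seg);
 *     for (; seg; seg = (VACodedBufferSegment *)seg->next)
 *         fwrite(seg->buf, 1, seg->size, out_fp);
 *     vaUnmapBuffer(dpy, coded_buf);
 */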
  1271.  
  1272. VAStatus
  1273. i965_DestroyBuffer(VADriverContextP ctx, VABufferID buffer_id)
  1274. {
  1275.     struct i965_driver_data *i965 = i965_driver_data(ctx);
  1276.     struct object_buffer *obj_buffer = BUFFER(buffer_id);
  1277.  
  1278.     assert(obj_buffer);
  1279.     i965_destroy_buffer(&i965->buffer_heap, (struct object_base *)obj_buffer);
  1280.  
  1281.     return VA_STATUS_SUCCESS;
  1282. }
  1283.  
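/*
 * Start a new frame: remember the render target and release any
 * parameter/slice buffers still referenced from the previous frame so
 * the codec state starts out clean.
 */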
  1284. VAStatus
  1285. i965_BeginPicture(VADriverContextP ctx,
  1286.                   VAContextID context,
  1287.                   VASurfaceID render_target)
  1288. {
  1289.     struct i965_driver_data *i965 = i965_driver_data(ctx);
  1290.     struct object_context *obj_context = CONTEXT(context);
  1291.     struct object_surface *obj_surface = SURFACE(render_target);
  1292.     struct object_config *obj_config;
  1293.     VAContextID config;
  1294.     VAStatus vaStatus;
  1295.     int i;
  1296.  
  1297.     assert(obj_context);
  1298.     assert(obj_surface);
  1299.  
  1300.     config = obj_context->config_id;
  1301.     obj_config = CONFIG(config);
  1302.     assert(obj_config);
  1303.  
  1304.     switch (obj_config->profile) {
  1305.     case VAProfileMPEG2Simple:
  1306.     case VAProfileMPEG2Main:
  1307.         vaStatus = VA_STATUS_SUCCESS;
  1308.         break;
  1309.  
  1310.     case VAProfileH264Baseline:
  1311.     case VAProfileH264Main:
  1312.     case VAProfileH264High:
  1313.         vaStatus = VA_STATUS_SUCCESS;
  1314.         break;
  1315.  
  1316.     case VAProfileVC1Simple:
  1317.     case VAProfileVC1Main:
  1318.     case VAProfileVC1Advanced:
  1319.         vaStatus = VA_STATUS_SUCCESS;
  1320.         break;
  1321.  
  1322.     case VAProfileJPEGBaseline:
  1323.         vaStatus = VA_STATUS_SUCCESS;
  1324.         break;
  1325.  
  1326.     default:
  1327.         assert(0);
  1328.         vaStatus = VA_STATUS_ERROR_UNSUPPORTED_PROFILE;
  1329.         break;
  1330.     }
  1331.  
  1332.     if (obj_context->codec_type == CODEC_ENC) {
  1333.         i965_release_buffer_store(&obj_context->codec_state.encode.pic_param);
  1334.         i965_release_buffer_store(&obj_context->codec_state.encode.seq_param);
  1335.  
  1336.         for (i = 0; i < obj_context->codec_state.encode.num_slice_params; i++) {
  1337.             i965_release_buffer_store(&obj_context->codec_state.encode.slice_params[i]);
  1338.         }
  1339.  
  1340.         obj_context->codec_state.encode.num_slice_params = 0;
  1341.         obj_context->codec_state.encode.current_render_target = render_target;     /* the new input frame */
  1342.     } else {
  1343.         obj_context->codec_state.decode.current_render_target = render_target;
  1344.         i965_release_buffer_store(&obj_context->codec_state.decode.pic_param);
  1345.         i965_release_buffer_store(&obj_context->codec_state.decode.iq_matrix);
  1346.         i965_release_buffer_store(&obj_context->codec_state.decode.bit_plane);
  1347.         i965_release_buffer_store(&obj_context->codec_state.decode.huffman_table);
  1348.  
  1349.         for (i = 0; i < obj_context->codec_state.decode.num_slice_params; i++) {
  1350.             i965_release_buffer_store(&obj_context->codec_state.decode.slice_params[i]);
  1351.             i965_release_buffer_store(&obj_context->codec_state.decode.slice_datas[i]);
  1352.         }
  1353.  
  1354.         obj_context->codec_state.decode.num_slice_params = 0;
  1355.         obj_context->codec_state.decode.num_slice_datas = 0;
  1356.     }
  1357.  
  1358.     return vaStatus;
  1359. }
  1360.  
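/*
 * The macros below generate the per-buffer-type helpers that store
 * incoming parameter buffers into the per-context codec state: the
 * "single" variant replaces the previously held buffer_store reference,
 * the "multi" variant appends to an array that grows in NUM_SLICES-sized
 * steps.
 */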
  1361. #define I965_RENDER_BUFFER(category, name) i965_render_##category##_##name##_buffer(ctx, obj_context, obj_buffer)
  1362.  
  1363. #define DEF_RENDER_SINGLE_BUFFER_FUNC(category, name, member)           \
  1364.     static VAStatus                                                     \
  1365.     i965_render_##category##_##name##_buffer(VADriverContextP ctx,      \
  1366.                                              struct object_context *obj_context, \
  1367.                                              struct object_buffer *obj_buffer) \
  1368.     {                                                                   \
  1369.         struct category##_state *category = &obj_context->codec_state.category; \
  1370.         assert(obj_buffer->buffer_store->bo == NULL);                   \
  1371.         assert(obj_buffer->buffer_store->buffer);                       \
  1372.         i965_release_buffer_store(&category->member);                   \
  1373.         i965_reference_buffer_store(&category->member, obj_buffer->buffer_store); \
  1374.         return VA_STATUS_SUCCESS;                                       \
  1375.     }
  1376.  
  1377. #define DEF_RENDER_MULTI_BUFFER_FUNC(category, name, member)            \
  1378.     static VAStatus                                                     \
  1379.     i965_render_##category##_##name##_buffer(VADriverContextP ctx,      \
  1380.                                              struct object_context *obj_context, \
  1381.                                              struct object_buffer *obj_buffer) \
  1382.     {                                                                   \
  1383.         struct category##_state *category = &obj_context->codec_state.category; \
  1384.         if (category->num_##member == category->max_##member) {         \
  1385.             category->member = realloc(category->member, (category->max_##member + NUM_SLICES) * sizeof(*category->member)); \
  1386.             memset(category->member + category->max_##member, 0, NUM_SLICES * sizeof(*category->member)); \
  1387.             category->max_##member += NUM_SLICES;                       \
  1388.         }                                                               \
  1389.         i965_release_buffer_store(&category->member[category->num_##member]); \
  1390.         i965_reference_buffer_store(&category->member[category->num_##member], obj_buffer->buffer_store); \
  1391.         category->num_##member++;                                       \
  1392.         return VA_STATUS_SUCCESS;                                       \
  1393.     }
  1394.  
  1395. #define I965_RENDER_DECODE_BUFFER(name) I965_RENDER_BUFFER(decode, name)
  1396.  
  1397. #define DEF_RENDER_DECODE_SINGLE_BUFFER_FUNC(name, member) DEF_RENDER_SINGLE_BUFFER_FUNC(decode, name, member)
  1398. DEF_RENDER_DECODE_SINGLE_BUFFER_FUNC(picture_parameter, pic_param)
  1399. DEF_RENDER_DECODE_SINGLE_BUFFER_FUNC(iq_matrix, iq_matrix)
  1400. DEF_RENDER_DECODE_SINGLE_BUFFER_FUNC(bit_plane, bit_plane)
  1401. DEF_RENDER_DECODE_SINGLE_BUFFER_FUNC(huffman_table, huffman_table)
  1402.  
  1403. #define DEF_RENDER_DECODE_MULTI_BUFFER_FUNC(name, member) DEF_RENDER_MULTI_BUFFER_FUNC(decode, name, member)
  1404. DEF_RENDER_DECODE_MULTI_BUFFER_FUNC(slice_parameter, slice_params)
  1405. DEF_RENDER_DECODE_MULTI_BUFFER_FUNC(slice_data, slice_datas)
  1406.  
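/*
 * i965_decoder_render_picture() walks the buffer list submitted through
 * vaRenderPicture() and stashes each buffer into the matching slot of
 * codec_state.decode; the data is only consumed later, when vaEndPicture()
 * runs the hardware backend.
 */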
  1407. static VAStatus
  1408. i965_decoder_render_picture(VADriverContextP ctx,
  1409.                             VAContextID context,
  1410.                             VABufferID *buffers,
  1411.                             int num_buffers)
  1412. {
  1413.     struct i965_driver_data *i965 = i965_driver_data(ctx);
  1414.     struct object_context *obj_context = CONTEXT(context);
  1415.     VAStatus vaStatus = VA_STATUS_SUCCESS;
  1416.     int i;
  1417.  
  1418.     for (i = 0; i < num_buffers && vaStatus == VA_STATUS_SUCCESS; i++) {
  1419.         struct object_buffer *obj_buffer = BUFFER(buffers[i]);
  1420.         assert(obj_buffer);
  1421.  
  1422.         switch (obj_buffer->type) {
  1423.         case VAPictureParameterBufferType:
  1424.             vaStatus = I965_RENDER_DECODE_BUFFER(picture_parameter);
  1425.             break;
  1426.  
  1427.         case VAIQMatrixBufferType:
  1428.             vaStatus = I965_RENDER_DECODE_BUFFER(iq_matrix);
  1429.             break;
  1430.  
  1431.         case VABitPlaneBufferType:
  1432.             vaStatus = I965_RENDER_DECODE_BUFFER(bit_plane);
  1433.             break;
  1434.  
  1435.         case VASliceParameterBufferType:
  1436.             vaStatus = I965_RENDER_DECODE_BUFFER(slice_parameter);
  1437.             break;
  1438.  
  1439.         case VASliceDataBufferType:
  1440.             vaStatus = I965_RENDER_DECODE_BUFFER(slice_data);
  1441.             break;
  1442.  
  1443. #ifdef HAVE_VA_JPEG_DECODE
  1444.         case VAHuffmanTableBufferType:
  1445.             vaStatus = I965_RENDER_DECODE_BUFFER(huffman_table);
  1446.             break;
  1447. #endif
  1448.  
  1449.         default:
  1450.             vaStatus = VA_STATUS_ERROR_UNSUPPORTED_BUFFERTYPE;
  1451.             break;
  1452.         }
  1453.     }
  1454.  
  1455.     return vaStatus;
  1456. }
  1457.  
  1458. #define I965_RENDER_ENCODE_BUFFER(name) I965_RENDER_BUFFER(encode, name)
  1459.  
  1460. #define DEF_RENDER_ENCODE_SINGLE_BUFFER_FUNC(name, member) DEF_RENDER_SINGLE_BUFFER_FUNC(encode, name, member)
  1461. DEF_RENDER_ENCODE_SINGLE_BUFFER_FUNC(sequence_parameter, seq_param)
  1462. DEF_RENDER_ENCODE_SINGLE_BUFFER_FUNC(picture_parameter, pic_param)
  1463. DEF_RENDER_ENCODE_SINGLE_BUFFER_FUNC(picture_control, pic_control)
  1464. DEF_RENDER_ENCODE_SINGLE_BUFFER_FUNC(qmatrix, q_matrix)
  1465. DEF_RENDER_ENCODE_SINGLE_BUFFER_FUNC(iqmatrix, iq_matrix)
  1466.  
  1467. #define DEF_RENDER_ENCODE_MULTI_BUFFER_FUNC(name, member) DEF_RENDER_MULTI_BUFFER_FUNC(encode, name, member)
  1468. DEF_RENDER_ENCODE_MULTI_BUFFER_FUNC(slice_parameter, slice_params)
  1469.  
  1470. static VAStatus
  1471. i965_encoder_render_picture(VADriverContextP ctx,
  1472.                             VAContextID context,
  1473.                             VABufferID *buffers,
  1474.                             int num_buffers)
  1475. {
  1476.     struct i965_driver_data *i965 = i965_driver_data(ctx);
  1477.     struct object_context *obj_context = CONTEXT(context);
  1478.     VAStatus vaStatus = VA_STATUS_SUCCESS;
  1479.     int i;
  1480.  
  1481.     for (i = 0; i < num_buffers && vaStatus == VA_STATUS_SUCCESS; i++) {
  1482.         struct object_buffer *obj_buffer = BUFFER(buffers[i]);
  1483.         assert(obj_buffer);
  1484.  
  1485.         switch (obj_buffer->type) {
  1486.         case VAEncSequenceParameterBufferType:
  1487.             vaStatus = I965_RENDER_ENCODE_BUFFER(sequence_parameter);
  1488.             break;
  1489.  
  1490.         case VAEncPictureParameterBufferType:
  1491.             vaStatus = I965_RENDER_ENCODE_BUFFER(picture_parameter);
  1492.             break;
  1493.  
  1494.         case VAEncSliceParameterBufferType:
  1495.             vaStatus = I965_RENDER_ENCODE_BUFFER(slice_parameter);
  1496.             break;
  1497.  
  1498.         case VAPictureParameterBufferType:
  1499.             vaStatus = I965_RENDER_ENCODE_BUFFER(picture_control);
  1500.             break;
  1501.  
  1502.         case VAQMatrixBufferType:
  1503.             vaStatus = I965_RENDER_ENCODE_BUFFER(qmatrix);
  1504.             break;
  1505.  
  1506.         case VAIQMatrixBufferType:
  1507.             vaStatus = I965_RENDER_ENCODE_BUFFER(iqmatrix);
  1508.             break;
  1509.  
  1510.         default:
  1511.             vaStatus = VA_STATUS_ERROR_UNSUPPORTED_BUFFERTYPE;
  1512.             break;
  1513.         }
  1514.     }
  1515.  
  1516.     return vaStatus;
  1517. }
  1518.  
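/*
 * Entry point for vaRenderPicture(): look up the context's config and route
 * the buffers to the encoder or decoder path depending on the entrypoint.
 */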
  1519. VAStatus
  1520. i965_RenderPicture(VADriverContextP ctx,
  1521.                    VAContextID context,
  1522.                    VABufferID *buffers,
  1523.                    int num_buffers)
  1524. {
  1525.     struct i965_driver_data *i965 = i965_driver_data(ctx);
  1526.     struct object_context *obj_context;
  1527.     struct object_config *obj_config;
  1528.     VAContextID config;
  1529.     VAStatus vaStatus = VA_STATUS_ERROR_UNKNOWN;
  1530.  
  1531.     obj_context = CONTEXT(context);
  1532.     assert(obj_context);
  1533.  
  1534.     config = obj_context->config_id;
  1535.     obj_config = CONFIG(config);
  1536.     assert(obj_config);
  1537.  
  1538.     if (VAEntrypointEncSlice == obj_config->entrypoint) {
  1539.         vaStatus = i965_encoder_render_picture(ctx, context, buffers, num_buffers);
  1540.     } else {
  1541.         vaStatus = i965_decoder_render_picture(ctx, context, buffers, num_buffers);
  1542.     }
  1543.  
  1544.     return vaStatus;
  1545. }
  1546.  
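/*
 * Entry point for vaEndPicture(): sanity-check that the mandatory parameter
 * and slice buffers were submitted for the current frame, then hand the
 * accumulated codec_state to the backend via hw_context->run().
 */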
  1547. VAStatus
  1548. i965_EndPicture(VADriverContextP ctx, VAContextID context)
  1549. {
  1550.     struct i965_driver_data *i965 = i965_driver_data(ctx);
  1551.     struct object_context *obj_context = CONTEXT(context);
  1552.     struct object_config *obj_config;
  1553.     VAContextID config;
  1554.  
  1555.     assert(obj_context);
  1556.     config = obj_context->config_id;
  1557.     obj_config = CONFIG(config);
  1558.     assert(obj_config);
  1559.  
  1560.     if (obj_context->codec_type == CODEC_ENC) {
  1561.         assert(VAEntrypointEncSlice == obj_config->entrypoint);
  1562.  
  1563.         assert(obj_context->codec_state.encode.pic_param);
  1564.         assert(obj_context->codec_state.encode.seq_param);
  1565.         assert(obj_context->codec_state.encode.num_slice_params >= 1);
  1566.     } else {
  1567.         assert(obj_context->codec_state.decode.pic_param);
  1568.         assert(obj_context->codec_state.decode.num_slice_params >= 1);
  1569.         assert(obj_context->codec_state.decode.num_slice_datas >= 1);
  1570.         assert(obj_context->codec_state.decode.num_slice_params == obj_context->codec_state.decode.num_slice_datas);
  1571.     }
  1572.  
  1573.     assert(obj_context->hw_context->run);
  1574.     obj_context->hw_context->run(ctx, obj_config->profile, &obj_context->codec_state, obj_context->hw_context);
  1575.  
  1576.     return VA_STATUS_SUCCESS;
  1577. }
  1578.  
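/*
 * vaSyncSurface(): block until any pending rendering to the surface's buffer
 * object has completed.
 */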
  1579. VAStatus
  1580. i965_SyncSurface(VADriverContextP ctx,
  1581.                  VASurfaceID render_target)
  1582. {
  1583.     struct i965_driver_data *i965 = i965_driver_data(ctx);
  1584.     struct object_surface *obj_surface = SURFACE(render_target);
  1585.  
  1586.     assert(obj_surface);
  1587.  
  1588.     if(obj_surface->bo)
  1589.         drm_intel_bo_wait_rendering(obj_surface->bo);
  1590.  
  1591.     return VA_STATUS_SUCCESS;
  1592. }
  1593.  
  1594. VAStatus
  1595. i965_QuerySurfaceStatus(VADriverContextP ctx,
  1596.                         VASurfaceID render_target,
  1597.                         VASurfaceStatus *status)        /* out */
  1598. {
  1599.     struct i965_driver_data *i965 = i965_driver_data(ctx);
  1600.     struct object_surface *obj_surface = SURFACE(render_target);
  1601.  
  1602.     assert(obj_surface);
  1603.  
  1604.     if (obj_surface->bo) {
  1605. //        if (drm_intel_bo_busy(obj_surface->bo)){
  1606. //            *status = VASurfaceRendering;
  1607. //        }
  1608. //        else {
  1609.             *status = VASurfaceReady;
  1610. //        }
  1611.     } else {
  1612.         *status = VASurfaceReady;
  1613.     }
  1614.  
  1615.     return VA_STATUS_SUCCESS;
  1616. }
  1617.  
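/*
 * Display attribute support: the attributes advertised in
 * i965_display_attributes are copied into per-driver storage at init time,
 * and get_display_attribute() looks an entry up by type.
 */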
  1618. static VADisplayAttribute *
  1619. get_display_attribute(VADriverContextP ctx, VADisplayAttribType type)
  1620. {
  1621.     struct i965_driver_data * const i965 = i965_driver_data(ctx);
  1622.     unsigned int i;
  1623.  
  1624.     if (!i965->display_attributes)
  1625.         return NULL;
  1626.  
  1627.     for (i = 0; i < i965->num_display_attributes; i++) {
  1628.         if (i965->display_attributes[i].type == type)
  1629.             return &i965->display_attributes[i];
  1630.     }
  1631.     return NULL;
  1632. }
  1633.  
  1634. static bool
  1635. i965_display_attributes_init(VADriverContextP ctx)
  1636. {
  1637.     struct i965_driver_data * const i965 = i965_driver_data(ctx);
  1638.  
  1639.     printf("%s\n", __FUNCTION__);
  1640.  
  1641.     i965->num_display_attributes = ARRAY_ELEMS(i965_display_attributes);
  1642.     i965->display_attributes = malloc(
  1643.         i965->num_display_attributes * sizeof(i965->display_attributes[0]));
  1644.     if (!i965->display_attributes)
  1645.         return false;
  1646.  
  1647.     memcpy(
  1648.         i965->display_attributes,
  1649.         i965_display_attributes,
  1650.         sizeof(i965_display_attributes)
  1651.     );
  1652.  
  1653.     i965->rotation_attrib = get_display_attribute(ctx, VADisplayAttribRotation);
  1654.     if (!i965->rotation_attrib)
  1655.         return false;
  1656.     return true;
  1657. }
  1658.  
  1659. static void
  1660. i965_display_attributes_terminate(VADriverContextP ctx)
  1661. {
  1662.     struct i965_driver_data * const i965 = i965_driver_data(ctx);
  1663.  
  1664.     if (i965->display_attributes) {
  1665.         free(i965->display_attributes);
  1666.         i965->display_attributes = NULL;
  1667.         i965->num_display_attributes = 0;
  1668.     }
  1669. }
  1670.  
  1671. /*
  1672.  * Query display attributes
  1673.  * The caller must provide an "attr_list" array that can hold at
  1674.  * least vaMaxNumDisplayAttributes() entries. The actual number of attributes
  1675.  * returned in "attr_list" is returned in "num_attributes".
  1676.  */
  1677. VAStatus
  1678. i965_QueryDisplayAttributes(
  1679.     VADriverContextP    ctx,
  1680.     VADisplayAttribute *attribs,        /* out */
  1681.     int                *num_attribs_ptr /* out */
  1682. )
  1683. {
  1684.     const int num_attribs = ARRAY_ELEMS(i965_display_attributes);
  1685.  
  1686.     if (attribs && num_attribs > 0)
  1687.         memcpy(attribs, i965_display_attributes, sizeof(i965_display_attributes));
  1688.  
  1689.     if (num_attribs_ptr)
  1690.         *num_attribs_ptr = num_attribs;
  1691.  
  1692.     return VA_STATUS_SUCCESS;
  1693. }
  1694.  
  1695. /*
  1696.  * Get display attributes
  1697.  * This function returns the current attribute values in "attr_list".
  1698.  * Only attributes returned with VA_DISPLAY_ATTRIB_GETTABLE set in the "flags" field
  1699.  * from vaQueryDisplayAttributes() can have their values retrieved.
  1700.  */
  1701. VAStatus
  1702. i965_GetDisplayAttributes(
  1703.     VADriverContextP    ctx,
  1704.     VADisplayAttribute *attribs,        /* inout */
  1705.     int                 num_attribs     /* in */
  1706. )
  1707. {
  1708.     int i;
  1709.  
  1710.     for (i = 0; i < num_attribs; i++) {
  1711.         VADisplayAttribute *src_attrib, * const dst_attrib = &attribs[i];
  1712.  
  1713.         src_attrib = get_display_attribute(ctx, dst_attrib->type);
  1714.         if (src_attrib && (src_attrib->flags & VA_DISPLAY_ATTRIB_GETTABLE)) {
  1715.             dst_attrib->min_value = src_attrib->min_value;
  1716.             dst_attrib->max_value = src_attrib->max_value;
  1717.             dst_attrib->value     = src_attrib->value;
  1718.         }
  1719.         else
  1720.             dst_attrib->flags = VA_DISPLAY_ATTRIB_NOT_SUPPORTED;
  1721.     }
  1722.     return VA_STATUS_SUCCESS;
  1723. }
  1724.  
  1725. /*
  1726.  * Set display attributes
  1727.  * Only attributes returned with VA_DISPLAY_ATTRIB_SETTABLE set in the "flags" field
  1728.  * from vaQueryDisplayAttributes() can be set; attributes that are not settable are skipped.
  1729.  * Unknown attribute types return VA_STATUS_ERROR_ATTR_NOT_SUPPORTED and out-of-range
  1730.  * values return VA_STATUS_ERROR_INVALID_PARAMETER.
  1730.  */
  1731. VAStatus
  1732. i965_SetDisplayAttributes(
  1733.     VADriverContextP    ctx,
  1734.     VADisplayAttribute *attribs,        /* in */
  1735.     int                 num_attribs     /* in */
  1736. )
  1737. {
  1738.     int i;
  1739.  
  1740.     for (i = 0; i < num_attribs; i++) {
  1741.         VADisplayAttribute *dst_attrib, * const src_attrib = &attribs[i];
  1742.  
  1743.         dst_attrib = get_display_attribute(ctx, src_attrib->type);
  1744.         if (!dst_attrib)
  1745.             return VA_STATUS_ERROR_ATTR_NOT_SUPPORTED;
  1746.  
  1747.         if (!(dst_attrib->flags & VA_DISPLAY_ATTRIB_SETTABLE))
  1748.             continue;
  1749.  
  1750.         if (src_attrib->value < dst_attrib->min_value ||
  1751.             src_attrib->value > dst_attrib->max_value)
  1752.             return VA_STATUS_ERROR_INVALID_PARAMETER;
  1753.  
  1754.         dst_attrib->value = src_attrib->value;
  1755.         /* XXX: track modified attributes through timestamps */
  1756.     }
  1757.     return VA_STATUS_SUCCESS;
  1758. }
  1759.  
  1760. VAStatus
  1761. i965_DbgCopySurfaceToBuffer(VADriverContextP ctx,
  1762.                             VASurfaceID surface,
  1763.                             void **buffer,              /* out */
  1764.                             unsigned int *stride)       /* out */
  1765. {
  1766.     /* TODO */
  1767.     return VA_STATUS_ERROR_UNIMPLEMENTED;
  1768. }
  1769.  
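/*
 * i965_Init(): bring up the DRM layer, pick the codec capability table for
 * the detected GPU generation, create the render batchbuffer and initialise
 * the display attribute, post-processing, render and output (X11/Wayland)
 * subsystems.
 */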
  1770. static VAStatus
  1771. i965_Init(VADriverContextP ctx)
  1772. {
  1773.     struct i965_driver_data *i965 = i965_driver_data(ctx);
  1774.  
  1775.     printf("%s context %p\n", __FUNCTION__, ctx);
  1776.  
  1777.  
  1778.     if (intel_driver_init(ctx) == False)
  1779.         return VA_STATUS_ERROR_UNKNOWN;
  1780.  
  1781.     printf("set codec info\n");
  1782.    
  1783.     if (IS_HASWELL(i965->intel.device_id))
  1784.         i965->codec_info = &gen75_hw_codec_info;
  1785.     else if (IS_G4X(i965->intel.device_id))
  1786.         i965->codec_info = &g4x_hw_codec_info;
  1787.     else if (IS_IRONLAKE(i965->intel.device_id))
  1788.         i965->codec_info = &ironlake_hw_codec_info;
  1789.     else if (IS_GEN6(i965->intel.device_id))
  1790.         i965->codec_info = &gen6_hw_codec_info;
  1791.     else if (IS_GEN7(i965->intel.device_id))
  1792.         i965->codec_info = &gen7_hw_codec_info;
  1793.     else
  1794.         return VA_STATUS_ERROR_UNKNOWN;
  1795.  
  1796.     printf("codec info %p\n", i965->codec_info);
  1797.      
  1798.     i965->batch = intel_batchbuffer_new(&i965->intel, I915_EXEC_RENDER, 0);
  1799.  
  1800.     if (!i965_display_attributes_init(ctx))
  1801.         return VA_STATUS_ERROR_UNKNOWN;
  1802.  
  1803.     if (i965_post_processing_init(ctx) == False)
  1804.         return VA_STATUS_ERROR_UNKNOWN;
  1805.  
  1806.     if (i965_render_init(ctx) == False)
  1807.         return VA_STATUS_ERROR_UNKNOWN;
  1808.  
  1809. #ifdef HAVE_VA_WAYLAND
  1810.     if (IS_VA_WAYLAND(ctx) && !i965_output_wayland_init(ctx))
  1811.         return VA_STATUS_ERROR_UNKNOWN;
  1812. #endif
  1813.  
  1814. #ifdef HAVE_VA_X11
  1815.     if (IS_VA_X11(ctx) && !i965_output_dri_init(ctx))
  1816.         return VA_STATUS_ERROR_UNKNOWN;
  1817. #endif
  1818.  
  1819.     _i965InitMutex(&i965->render_mutex);
  1820.  
  1821.     printf("device_id=%x has_exec2=%d has_bsd=%d has_blt=%d\n",
  1822.             i965->intel.device_id, i965->intel.has_exec2,
  1823.             i965->intel.has_bsd, i965->intel.has_blt);
  1824.  
  1825.     printf("%s done\n", __FUNCTION__);
  1826.    
  1827.     return VA_STATUS_SUCCESS;
  1828. }
  1829.  
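/*
 * i965_destroy_heap(): walk every live object in the heap, run the supplied
 * destructor on it, then tear the heap itself down.
 */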
  1830. static void
  1831. i965_destroy_heap(struct object_heap *heap,
  1832.                   void (*func)(struct object_heap *heap, struct object_base *object))
  1833. {
  1834.     struct object_base *object;
  1835.     object_heap_iterator iter;
  1836.  
  1837.     object = object_heap_first(heap, &iter);
  1838.  
  1839.     while (object) {
  1840.         if (func)
  1841.             func(heap, object);
  1842.  
  1843.         object = object_heap_next(heap, &iter);
  1844.     }
  1845.  
  1846.     object_heap_destroy(heap);
  1847. }
  1848.  
  1849.  
  1850. VAStatus
  1851. i965_DestroyImage(VADriverContextP ctx, VAImageID image);
  1852.  
  1853. VAStatus
  1854. i965_CreateImage(VADriverContextP ctx,
  1855.                  VAImageFormat *format,
  1856.                  int width,
  1857.                  int height,
  1858.                  VAImage *out_image)        /* out */
  1859. {
  1860.     struct i965_driver_data *i965 = i965_driver_data(ctx);
  1861.     struct object_image *obj_image;
  1862.     VAStatus va_status = VA_STATUS_ERROR_OPERATION_FAILED;
  1863.     VAImageID image_id;
  1864.     unsigned int width2, height2, size2, size;
  1865.  
  1866.     out_image->image_id = VA_INVALID_ID;
  1867.     out_image->buf      = VA_INVALID_ID;
  1868.  
  1869.     image_id = NEW_IMAGE_ID();
  1870.     if (image_id == VA_INVALID_ID)
  1871.         return VA_STATUS_ERROR_ALLOCATION_FAILED;
  1872.  
  1873.     obj_image = IMAGE(image_id);
  1874.     if (!obj_image)
  1875.         return VA_STATUS_ERROR_ALLOCATION_FAILED;
  1876.     obj_image->bo         = NULL;
  1877.     obj_image->palette    = NULL;
  1878.     obj_image->derived_surface = VA_INVALID_ID;
  1879.  
  1880.     VAImage * const image = &obj_image->image;
  1881.     image->image_id       = image_id;
  1882.     image->buf            = VA_INVALID_ID;
  1883.  
  1884.     size    = width * height;
  1885.     width2  = (width  + 1) / 2;
  1886.     height2 = (height + 1) / 2;
  1887.     size2   = width2 * height2;
  1888.  
  1889.     image->num_palette_entries = 0;
  1890.     image->entry_bytes         = 0;
  1891.     memset(image->component_order, 0, sizeof(image->component_order));
  1892.  
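    /*
     * Plane layout: "size" is the number of bytes in the full-resolution Y
     * plane, and "size2" is one 4:2:0 chroma plane (half width, half height,
     * both rounded up).  The switch below fills in pitches/offsets per fourcc.
     */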
  1893.     switch (format->fourcc) {
  1894.     case VA_FOURCC('I','A','4','4'):
  1895.     case VA_FOURCC('A','I','4','4'):
  1896.         image->num_planes = 1;
  1897.         image->pitches[0] = width;
  1898.         image->offsets[0] = 0;
  1899.         image->data_size  = image->offsets[0] + image->pitches[0] * height;
  1900.         image->num_palette_entries = 16;
  1901.         image->entry_bytes         = 3;
  1902.         image->component_order[0]  = 'R';
  1903.         image->component_order[1]  = 'G';
  1904.         image->component_order[2]  = 'B';
  1905.         break;
  1906.     case VA_FOURCC('I','A','8','8'):
  1907.     case VA_FOURCC('A','I','8','8'):
  1908.         image->num_planes = 1;
  1909.         image->pitches[0] = width * 2;
  1910.         image->offsets[0] = 0;
  1911.         image->data_size  = image->offsets[0] + image->pitches[0] * height;
  1912.         image->num_palette_entries = 256;
  1913.         image->entry_bytes         = 3;
  1914.         image->component_order[0]  = 'R';
  1915.         image->component_order[1]  = 'G';
  1916.         image->component_order[2]  = 'B';
  1917.         break;
  1918.     case VA_FOURCC('A','R','G','B'):
  1919.     case VA_FOURCC('A','B','G','R'):
  1920.     case VA_FOURCC('B','G','R','A'):
  1921.     case VA_FOURCC('R','G','B','A'):
  1922.         image->num_planes = 1;
  1923.         image->pitches[0] = width * 4;
  1924.         image->offsets[0] = 0;
  1925.         image->data_size  = image->offsets[0] + image->pitches[0] * height;
  1926.         break;
  1927.     case VA_FOURCC('Y','V','1','2'):
  1928.         image->num_planes = 3;
  1929.         image->pitches[0] = width;
  1930.         image->offsets[0] = 0;
  1931.         image->pitches[1] = width2;
  1932.         image->offsets[1] = size + size2;
  1933.         image->pitches[2] = width2;
  1934.         image->offsets[2] = size;
  1935.         image->data_size  = size + 2 * size2;
  1936.         break;
  1937.     case VA_FOURCC('I','4','2','0'):
  1938.         image->num_planes = 3;
  1939.         image->pitches[0] = width;
  1940.         image->offsets[0] = 0;
  1941.         image->pitches[1] = width2;
  1942.         image->offsets[1] = size;
  1943.         image->pitches[2] = width2;
  1944.         image->offsets[2] = size + size2;
  1945.         image->data_size  = size + 2 * size2;
  1946.         break;
  1947.     case VA_FOURCC('N','V','1','2'):
  1948.         image->num_planes = 2;
  1949.         image->pitches[0] = width;
  1950.         image->offsets[0] = 0;
  1951.         image->pitches[1] = width;
  1952.         image->offsets[1] = size;
  1953.         image->data_size  = size + 2 * size2;
  1954.         break;
  1955.     default:
  1956.         goto error;
  1957.     }
  1958.  
  1959.     va_status = i965_CreateBuffer(ctx, 0, VAImageBufferType,
  1960.                                   image->data_size, 1, NULL, &image->buf);
  1961.     if (va_status != VA_STATUS_SUCCESS)
  1962.         goto error;
  1963.  
  1964.     obj_image->bo = BUFFER(image->buf)->buffer_store->bo;
  1965.     dri_bo_reference(obj_image->bo);
  1966.  
  1967.     if (image->num_palette_entries > 0 && image->entry_bytes > 0) {
  1968.         obj_image->palette = malloc(image->num_palette_entries * sizeof(*obj_image->palette));
  1969.         if (!obj_image->palette)
  1970.             goto error;
  1971.     }
  1972.  
  1973.     image->image_id             = image_id;
  1974.     image->format               = *format;
  1975.     image->width                = width;
  1976.     image->height               = height;
  1977.  
  1978.     *out_image                  = *image;
  1979.     return VA_STATUS_SUCCESS;
  1980.  
  1981.  error:
  1982.     i965_DestroyImage(ctx, image_id);
  1983.     return va_status;
  1984. }
  1985.  
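/*
 * i965_check_alloc_surface_bo() lazily allocates the backing buffer object
 * for a surface and records its plane layout.  For tiled surfaces the Y
 * plane is padded to a 128x32 granularity and the chroma offsets are
 * expressed in rows of the shared pitch; for linear surfaces the existing
 * width/height are used and the chroma planes simply follow the Y plane.
 */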
  1986. void
  1987. i965_check_alloc_surface_bo(VADriverContextP ctx,
  1988.                             struct object_surface *obj_surface,
  1989.                             int tiled,
  1990.                             unsigned int fourcc,
  1991.                             unsigned int subsampling)
  1992. {
  1993.     struct i965_driver_data *i965 = i965_driver_data(ctx);
  1994.     int region_width, region_height;
  1995.  
  1996.     if (obj_surface->bo) {
  1997.         assert(obj_surface->fourcc);
  1998.         assert(obj_surface->fourcc == fourcc);
  1999.         assert(obj_surface->subsampling == subsampling);
  2000.         return;
  2001.     }
  2002.  
  2003.     obj_surface->x_cb_offset = 0; /* X offset is always 0 */
  2004.     obj_surface->x_cr_offset = 0;
  2005.  
  2006.     if (tiled) {
  2007.         assert(fourcc == VA_FOURCC('N', 'V', '1', '2') ||
  2008.                fourcc == VA_FOURCC('I', 'M', 'C', '1') ||
  2009.                fourcc == VA_FOURCC('I', 'M', 'C', '3'));
  2010.  
  2011.         obj_surface->width = ALIGN(obj_surface->orig_width, 128);
  2012.         obj_surface->height = ALIGN(obj_surface->orig_height, 32);
  2013.         obj_surface->cb_cr_pitch = obj_surface->width;
  2014.         region_width = obj_surface->width;
  2015.         region_height = obj_surface->height;
  2016.  
  2017.         if (fourcc == VA_FOURCC('N', 'V', '1', '2')) {
  2018.             assert(subsampling == SUBSAMPLE_YUV420);
  2019.             obj_surface->y_cb_offset = obj_surface->height;
  2020.             obj_surface->y_cr_offset = obj_surface->height;
  2021.             obj_surface->cb_cr_width = obj_surface->orig_width / 2;
  2022.             obj_surface->cb_cr_height = obj_surface->orig_height / 2;
  2023.             region_height = obj_surface->height + ALIGN(obj_surface->cb_cr_height, 32);
  2024.         } else if (fourcc == VA_FOURCC('I', 'M', 'C', '1') ||
  2025.                    fourcc == VA_FOURCC('I', 'M', 'C', '3')) {
  2026.             switch (subsampling) {
  2027.             case SUBSAMPLE_YUV400:
  2028.                 obj_surface->cb_cr_width = 0;
  2029.                 obj_surface->cb_cr_height = 0;
  2030.                 break;
  2031.  
  2032.             case SUBSAMPLE_YUV420:
  2033.                 obj_surface->cb_cr_width = obj_surface->orig_width / 2;
  2034.                 obj_surface->cb_cr_height = obj_surface->orig_height / 2;
  2035.                 break;
  2036.  
  2037.             case SUBSAMPLE_YUV422H:
  2038.                 obj_surface->cb_cr_width = obj_surface->orig_width / 2;
  2039.                 obj_surface->cb_cr_height = obj_surface->orig_height;
  2040.                 break;
  2041.  
  2042.             case SUBSAMPLE_YUV422V:
  2043.                 obj_surface->cb_cr_width = obj_surface->orig_width;
  2044.                 obj_surface->cb_cr_height = obj_surface->orig_height / 2;
  2045.                 break;
  2046.  
  2047.             case SUBSAMPLE_YUV444:
  2048.                 obj_surface->cb_cr_width = obj_surface->orig_width;
  2049.                 obj_surface->cb_cr_height = obj_surface->orig_height;
  2050.                 break;
  2051.  
  2052.             case SUBSAMPLE_YUV411:
  2053.                 obj_surface->cb_cr_width = obj_surface->orig_width / 4;
  2054.                 obj_surface->cb_cr_height = obj_surface->orig_height;
  2055.                 break;
  2056.  
  2057.             default:
  2058.                 assert(0);
  2059.                 break;
  2060.             }
  2061.  
  2062.             region_height = obj_surface->height + ALIGN(obj_surface->cb_cr_height, 32) * 2;
  2063.  
  2064.             if (fourcc == VA_FOURCC('I', 'M', 'C', '1')) {
  2065.                 obj_surface->y_cr_offset = obj_surface->height;
  2066.                 obj_surface->y_cb_offset = obj_surface->y_cr_offset + ALIGN(obj_surface->cb_cr_height, 32);
  2067.             } else {
  2068.                 obj_surface->y_cb_offset = obj_surface->height;
  2069.                 obj_surface->y_cr_offset = obj_surface->y_cb_offset + ALIGN(obj_surface->cb_cr_height, 32);
  2070.             }
  2071.         }
  2072.     } else {
  2073.         assert(fourcc != VA_FOURCC('I', 'M', 'C', '1') &&
  2074.                fourcc != VA_FOURCC('I', 'M', 'C', '3'));
  2075.         assert(subsampling == SUBSAMPLE_YUV420);
  2076.  
  2077.         region_width = obj_surface->width;
  2078.         region_height = obj_surface->height;
  2079.  
  2080.         switch (fourcc) {
  2081.         case VA_FOURCC('N', 'V', '1', '2'):
  2082.             obj_surface->y_cb_offset = obj_surface->height;
  2083.             obj_surface->y_cr_offset = obj_surface->height;
  2084.             obj_surface->cb_cr_width = obj_surface->orig_width / 2;
  2085.             obj_surface->cb_cr_height = obj_surface->orig_height / 2;
  2086.             obj_surface->cb_cr_pitch = obj_surface->width;
  2087.             region_height = obj_surface->height + obj_surface->height / 2;
  2088.             break;
  2089.  
  2090.         case VA_FOURCC('Y', 'V', '1', '2'):
  2091.         case VA_FOURCC('I', '4', '2', '0'):
  2092.             if (fourcc == VA_FOURCC('Y', 'V', '1', '2')) {
  2093.                 obj_surface->y_cr_offset = obj_surface->height;
  2094.                 obj_surface->y_cb_offset = obj_surface->height + obj_surface->height / 4;
  2095.             } else {
  2096.                 obj_surface->y_cb_offset = obj_surface->height;
  2097.                 obj_surface->y_cr_offset = obj_surface->height + obj_surface->height / 4;
  2098.             }
  2099.  
  2100.             obj_surface->cb_cr_width = obj_surface->orig_width / 2;
  2101.             obj_surface->cb_cr_height = obj_surface->orig_height / 2;
  2102.             obj_surface->cb_cr_pitch = obj_surface->width / 2;
  2103.             region_height = obj_surface->height + obj_surface->height / 2;
  2104.             break;
  2105.  
  2106.         default:
  2107.             assert(0);
  2108.             break;
  2109.         }
  2110.     }
  2111.  
  2112.     obj_surface->size = ALIGN(region_width * region_height, 0x1000);
  2113.  
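    /*
     * The Y-tiled allocation path is disabled in this port (see the #if 0
     * block below); a linear buffer of obj_surface->size bytes is always
     * allocated instead.
     */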
  2114. #if 0
  2115.     if (tiled) {
  2116.         uint32_t tiling_mode = I915_TILING_Y; /* always uses Y-tiled format */
  2117.         unsigned long pitch;
  2118.  
  2119.         obj_surface->bo = drm_intel_bo_alloc_tiled(i965->intel.bufmgr,
  2120.                                                    "vaapi surface",
  2121.                                                    region_width,
  2122.                                                    region_height,
  2123.                                                    1,
  2124.                                                    &tiling_mode,
  2125.                                                    &pitch,
  2126.                                                    0);
  2127.         assert(tiling_mode == I915_TILING_Y);
  2128.         assert(pitch == obj_surface->width);
  2129.     } else {
  2130. #endif
  2131.        
  2132.         obj_surface->bo = dri_bo_alloc(i965->intel.bufmgr,
  2133.                                        "vaapi surface",
  2134.                                        obj_surface->size,
  2135.                                        0x1000);
  2136. //    }
  2137.  
  2138.     obj_surface->fourcc = fourcc;
  2139.     obj_surface->subsampling = subsampling;
  2140.     assert(obj_surface->bo);
  2141. }
  2142.  
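/*
 * vaDeriveImage(): expose the surface's buffer object directly as a VAImage
 * without copying.  The reported layout (YV12, NV12 or I420) is chosen from
 * the render state so that it matches how decoded data is laid out in the BO.
 */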
  2143. VAStatus i965_DeriveImage(VADriverContextP ctx,
  2144.                           VASurfaceID surface,
  2145.                           VAImage *out_image)        /* out */
  2146. {
  2147.     struct i965_driver_data *i965 = i965_driver_data(ctx);
  2148.     struct i965_render_state *render_state = &i965->render_state;
  2149.     struct object_image *obj_image;
  2150.     struct object_surface *obj_surface;
  2151.     VAImageID image_id;
  2152.     unsigned int w_pitch, h_pitch;
  2153.     VAStatus va_status;
  2154.  
  2155.     out_image->image_id = VA_INVALID_ID;
  2156.     obj_surface = SURFACE(surface);
  2157.  
  2158.     if (!obj_surface)
  2159.         return VA_STATUS_ERROR_INVALID_SURFACE;
  2160.  
  2161.     w_pitch = obj_surface->width;
  2162.     h_pitch = obj_surface->height;
  2163.  
  2164.     image_id = NEW_IMAGE_ID();
  2165.  
  2166.     if (image_id == VA_INVALID_ID)
  2167.         return VA_STATUS_ERROR_ALLOCATION_FAILED;
  2168.  
  2169.     obj_image = IMAGE(image_id);
  2170.  
  2171.     if (!obj_image)
  2172.         return VA_STATUS_ERROR_ALLOCATION_FAILED;
  2173.  
  2174.     obj_image->bo = NULL;
  2175.     obj_image->palette = NULL;
  2176.     obj_image->derived_surface = VA_INVALID_ID;
  2177.  
  2178.     VAImage * const image = &obj_image->image;
  2179.  
  2180.     memset(image, 0, sizeof(*image));
  2181.     image->image_id = image_id;
  2182.     image->buf = VA_INVALID_ID;
  2183.     image->num_palette_entries = 0;
  2184.     image->entry_bytes = 0;
  2185.     image->width = obj_surface->orig_width;
  2186.     image->height = obj_surface->orig_height;
  2187.     image->data_size = obj_surface->size;
  2188.  
  2189.     if (!render_state->inited) {
  2190.             image->format.fourcc = VA_FOURCC('Y','V','1','2');
  2191.             image->format.byte_order = VA_LSB_FIRST;
  2192.             image->format.bits_per_pixel = 12;
  2193.             image->num_planes = 3;
  2194.             image->pitches[0] = w_pitch;
  2195.             image->offsets[0] = 0;
  2196.             image->pitches[1] = w_pitch / 2;
  2197.             image->offsets[1] = w_pitch * h_pitch;
  2198.             image->pitches[2] = w_pitch / 2;
  2199.             image->offsets[2] = w_pitch * h_pitch + (w_pitch / 2) * (h_pitch / 2);
  2200.     } else {
  2201.         if (render_state->interleaved_uv) {
  2202.             image->format.fourcc = VA_FOURCC('N','V','1','2');
  2203.             image->format.byte_order = VA_LSB_FIRST;
  2204.             image->format.bits_per_pixel = 12;
  2205.             image->num_planes = 2;
  2206.             image->pitches[0] = w_pitch;
  2207.             image->offsets[0] = 0;
  2208.             image->pitches[1] = w_pitch;
  2209.             image->offsets[1] = w_pitch * h_pitch;
  2210.         } else {
  2211.             image->format.fourcc = VA_FOURCC('I','4','2','0');
  2212.             image->format.byte_order = VA_LSB_FIRST;
  2213.             image->format.bits_per_pixel = 12;
  2214.             image->num_planes = 3;
  2215.             image->pitches[0] = w_pitch;
  2216.             image->offsets[0] = 0;
  2217.             image->pitches[1] = w_pitch / 2;
  2218.             image->offsets[1] = w_pitch * h_pitch;
  2219.             image->pitches[2] = w_pitch / 2;
  2220.             image->offsets[2] = w_pitch * h_pitch + (w_pitch / 2) * (h_pitch / 2);
  2221.         }
  2222.     }
  2223.  
  2224.     i965_check_alloc_surface_bo(ctx, obj_surface, HAS_TILED_SURFACE(i965), image->format.fourcc, SUBSAMPLE_YUV420);
  2225.     va_status = i965_create_buffer_internal(ctx, 0, VAImageBufferType,
  2226.                                             obj_surface->size, 1, NULL, obj_surface->bo, &image->buf);
  2227.     if (va_status != VA_STATUS_SUCCESS)
  2228.         goto error;
  2229.  
  2230.     obj_image->bo = BUFFER(image->buf)->buffer_store->bo;
  2231.     dri_bo_reference(obj_image->bo);
  2232.  
  2233.     if (image->num_palette_entries > 0 && image->entry_bytes > 0) {
  2234.         obj_image->palette = malloc(image->num_palette_entries * sizeof(*obj_image->palette)); /* element size, not pointer size */
  2235.         if (!obj_image->palette) {
  2236.             va_status = VA_STATUS_ERROR_ALLOCATION_FAILED;
  2237.             goto error;
  2238.         }
  2239.     }
  2240.  
  2241.     *out_image = *image;
  2242.     obj_surface->flags |= SURFACE_DERIVED;
  2243.     obj_image->derived_surface = surface;
  2244.  
  2245.     return VA_STATUS_SUCCESS;
  2246.  
  2247.  error:
  2248.     i965_DestroyImage(ctx, image_id);
  2249.     return va_status;
  2250. }
  2251.  
  2252. static void
  2253. i965_destroy_image(struct object_heap *heap, struct object_base *obj)
  2254. {
  2255.     object_heap_free(heap, obj);
  2256. }
  2257.  
  2258.  
  2259. VAStatus
  2260. i965_DestroyImage(VADriverContextP ctx, VAImageID image)
  2261. {
  2262.     struct i965_driver_data *i965 = i965_driver_data(ctx);
  2263.     struct object_image *obj_image = IMAGE(image);
  2264.     struct object_surface *obj_surface;
  2265.  
  2266.     if (!obj_image)
  2267.         return VA_STATUS_SUCCESS;
  2268.  
  2269.     dri_bo_unreference(obj_image->bo);
  2270.     obj_image->bo = NULL;
  2271.  
  2272.     if (obj_image->image.buf != VA_INVALID_ID) {
  2273.         i965_DestroyBuffer(ctx, obj_image->image.buf);
  2274.         obj_image->image.buf = VA_INVALID_ID;
  2275.     }
  2276.  
  2277.     if (obj_image->palette) {
  2278.         free(obj_image->palette);
  2279.         obj_image->palette = NULL;
  2280.     }
  2281.  
  2282.     obj_surface = SURFACE(obj_image->derived_surface);
  2283.  
  2284.     if (obj_surface) {
  2285.         obj_surface->flags &= ~SURFACE_DERIVED;
  2286.     }
  2287.  
  2288.     i965_destroy_image(&i965->image_heap, (struct object_base *)obj_image);
  2289.  
  2290.     return VA_STATUS_SUCCESS;
  2291. }
  2292.  
  2293. /*
  2294.  * "palette" points to an array holding the palette data.  The array is
  2295.  * num_palette_entries * entry_bytes bytes in size.  The order of the components
  2296.  * in the palette is described by the component_order field in the VASubpicture struct.
  2297.  */
  2298. VAStatus
  2299. i965_SetImagePalette(VADriverContextP ctx,
  2300.                      VAImageID image,
  2301.                      unsigned char *palette)
  2302. {
  2303.     struct i965_driver_data *i965 = i965_driver_data(ctx);
  2304.     unsigned int i;
  2305.  
  2306.     struct object_image *obj_image = IMAGE(image);
  2307.     if (!obj_image)
  2308.         return VA_STATUS_ERROR_INVALID_IMAGE;
  2309.  
  2310.     if (!obj_image->palette)
  2311.         return VA_STATUS_ERROR_ALLOCATION_FAILED; /* XXX: unpaletted/error */
  2312.  
  2313.     for (i = 0; i < obj_image->image.num_palette_entries; i++)
  2314.         obj_image->palette[i] = (((unsigned int)palette[3*i + 0] << 16) |
  2315.                                  ((unsigned int)palette[3*i + 1] <<  8) |
  2316.                                  (unsigned int)palette[3*i + 2]);
  2317.     return VA_STATUS_SUCCESS;
  2318. }
  2319.  
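/*
 * memcpy_pic(): copy a "len" x "height" block of bytes one row at a time
 * between two buffers that may have different strides.
 */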
  2320. static inline void
  2321. memcpy_pic(uint8_t *dst, unsigned int dst_stride,
  2322.            const uint8_t *src, unsigned int src_stride,
  2323.            unsigned int len, unsigned int height)
  2324. {
  2325.     unsigned int i;
  2326.  
  2327.     for (i = 0; i < height; i++) {
  2328.         memcpy(dst, src, len);
  2329.         dst += dst_stride;
  2330.         src += src_stride;
  2331.     }
  2332. }
  2333.  
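/*
 * get_image_i420(): copy the requested rectangle from a planar 4:2:0 surface
 * into the destination image.  The U/V indices are swapped when the image
 * fourcc differs from the surface fourcc, so both I420 and YV12 destinations
 * are handled by the same code.
 */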
  2334. static void
  2335. get_image_i420(struct object_image *obj_image, uint8_t *image_data,
  2336.                struct object_surface *obj_surface,
  2337.                const VARectangle *rect)
  2338. {
  2339.     uint8_t *dst[3], *src[3];
  2340.     const int Y = 0;
  2341.     const int U = obj_image->image.format.fourcc == obj_surface->fourcc ? 1 : 2;
  2342.     const int V = obj_image->image.format.fourcc == obj_surface->fourcc ? 2 : 1;
  2343.     unsigned int tiling, swizzle;
  2344.  
  2345.     if (!obj_surface->bo)
  2346.         return;
  2347.  
  2348.     assert(obj_surface->fourcc);
  2349.     dri_bo_get_tiling(obj_surface->bo, &tiling, &swizzle);
  2350.  
  2351.     if (tiling != I915_TILING_NONE)
  2352.         drm_intel_gem_bo_map_gtt(obj_surface->bo);
  2353.     else
  2354.         dri_bo_map(obj_surface->bo, 0);
  2355.  
  2356.     if (!obj_surface->bo->virtual)
  2357.         return;
  2358.  
  2359.     /* Dest VA image has either I420 or YV12 format.
  2360.        Source VA surface always has I420 format */
  2361.     dst[Y] = image_data + obj_image->image.offsets[Y];
  2362.     src[0] = (uint8_t *)obj_surface->bo->virtual;
  2363.     dst[U] = image_data + obj_image->image.offsets[U];
  2364.     src[1] = src[0] + obj_surface->width * obj_surface->height;
  2365.     dst[V] = image_data + obj_image->image.offsets[V];
  2366.     src[2] = src[1] + (obj_surface->width / 2) * (obj_surface->height / 2);
  2367.  
  2368.     /* Y plane */
  2369.     dst[Y] += rect->y * obj_image->image.pitches[Y] + rect->x;
  2370.     src[0] += rect->y * obj_surface->width + rect->x;
  2371.     memcpy_pic(dst[Y], obj_image->image.pitches[Y],
  2372.                src[0], obj_surface->width,
  2373.                rect->width, rect->height);
  2374.  
  2375.     /* U plane */
  2376.     dst[U] += (rect->y / 2) * obj_image->image.pitches[U] + rect->x / 2;
  2377.     src[1] += (rect->y / 2) * obj_surface->width / 2 + rect->x / 2;
  2378.     memcpy_pic(dst[U], obj_image->image.pitches[U],
  2379.                src[1], obj_surface->width / 2,
  2380.                rect->width / 2, rect->height / 2);
  2381.  
  2382.     /* V plane */
  2383.     dst[V] += (rect->y / 2) * obj_image->image.pitches[V] + rect->x / 2;
  2384.     src[2] += (rect->y / 2) * obj_surface->width / 2 + rect->x / 2;
  2385.     memcpy_pic(dst[V], obj_image->image.pitches[V],
  2386.                src[2], obj_surface->width / 2,
  2387.                rect->width / 2, rect->height / 2);
  2388.  
  2389.     if (tiling != I915_TILING_NONE)
  2390.         drm_intel_gem_bo_unmap_gtt(obj_surface->bo);
  2391.     else
  2392.         dri_bo_unmap(obj_surface->bo);
  2393. }
  2394.  
  2395. static void
  2396. get_image_nv12(struct object_image *obj_image, uint8_t *image_data,
  2397.                struct object_surface *obj_surface,
  2398.                const VARectangle *rect)
  2399. {
  2400.     uint8_t *dst[2], *src[2];
  2401.     unsigned int tiling, swizzle;
  2402.  
  2403.     if (!obj_surface->bo)
  2404.         return;
  2405.  
  2406.     assert(obj_surface->fourcc);
  2407.     dri_bo_get_tiling(obj_surface->bo, &tiling, &swizzle);
  2408.  
  2409.     if (tiling != I915_TILING_NONE)
  2410.         drm_intel_gem_bo_map_gtt(obj_surface->bo);
  2411.     else
  2412.         dri_bo_map(obj_surface->bo, 0);
  2413.  
  2414.     if (!obj_surface->bo->virtual)
  2415.         return;
  2416.  
  2417.     /* Both dest VA image and source surface have NV12 format */
  2418.     dst[0] = image_data + obj_image->image.offsets[0];
  2419.     src[0] = (uint8_t *)obj_surface->bo->virtual;
  2420.     dst[1] = image_data + obj_image->image.offsets[1];
  2421.     src[1] = src[0] + obj_surface->width * obj_surface->height;
  2422.  
  2423.     /* Y plane */
  2424.     dst[0] += rect->y * obj_image->image.pitches[0] + rect->x;
  2425.     src[0] += rect->y * obj_surface->width + rect->x;
  2426.     memcpy_pic(dst[0], obj_image->image.pitches[0],
  2427.                src[0], obj_surface->width,
  2428.                rect->width, rect->height);
  2429.  
  2430.     /* UV plane */
  2431.     dst[1] += (rect->y / 2) * obj_image->image.pitches[1] + (rect->x & -2);
  2432.     src[1] += (rect->y / 2) * obj_surface->width + (rect->x & -2);
  2433.     memcpy_pic(dst[1], obj_image->image.pitches[1],
  2434.                src[1], obj_surface->width,
  2435.                rect->width, rect->height / 2);
  2436.  
  2437.     if (tiling != I915_TILING_NONE)
  2438.         drm_intel_gem_bo_unmap_gtt(obj_surface->bo);
  2439.     else
  2440.         dri_bo_unmap(obj_surface->bo);
  2441. }
  2442.  
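/*
 * vaGetImage(): validate the requested rectangle, map the image buffer and
 * copy the pixels out of the surface.  Only the surface's native layout is
 * supported (planar YUV when UV is not interleaved, NV12 otherwise).
 */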
  2443. VAStatus
  2444. i965_GetImage(VADriverContextP ctx,
  2445.               VASurfaceID surface,
  2446.               int x,   /* coordinates of the upper left source pixel */
  2447.               int y,
  2448.               unsigned int width,      /* width and height of the region */
  2449.               unsigned int height,
  2450.               VAImageID image)
  2451. {
  2452.     struct i965_driver_data *i965 = i965_driver_data(ctx);
  2453.     struct i965_render_state *render_state = &i965->render_state;
  2454.  
  2455.     struct object_surface *obj_surface = SURFACE(surface);
  2456.     if (!obj_surface)
  2457.         return VA_STATUS_ERROR_INVALID_SURFACE;
  2458.  
  2459.     struct object_image *obj_image = IMAGE(image);
  2460.     if (!obj_image)
  2461.         return VA_STATUS_ERROR_INVALID_IMAGE;
  2462.  
  2463.     if (x < 0 || y < 0)
  2464.         return VA_STATUS_ERROR_INVALID_PARAMETER;
  2465.     if (x + width > obj_surface->orig_width ||
  2466.         y + height > obj_surface->orig_height)
  2467.         return VA_STATUS_ERROR_INVALID_PARAMETER;
  2468.     if (x + width > obj_image->image.width ||
  2469.         y + height > obj_image->image.height)
  2470.         return VA_STATUS_ERROR_INVALID_PARAMETER;
  2471.  
  2472.     VAStatus va_status;
  2473.     void *image_data = NULL;
  2474.  
  2475.     va_status = i965_MapBuffer(ctx, obj_image->image.buf, &image_data);
  2476.     if (va_status != VA_STATUS_SUCCESS)
  2477.         return va_status;
  2478.  
  2479.     VARectangle rect;
  2480.     rect.x = x;
  2481.     rect.y = y;
  2482.     rect.width = width;
  2483.     rect.height = height;
  2484.  
  2485.     switch (obj_image->image.format.fourcc) {
  2486.     case VA_FOURCC('Y','V','1','2'):
  2487.     case VA_FOURCC('I','4','2','0'):
  2488.         /* I420 is native format for MPEG-2 decoded surfaces */
  2489.         if (render_state->interleaved_uv)
  2490.             goto operation_failed;
  2491.         get_image_i420(obj_image, image_data, obj_surface, &rect);
  2492.         break;
  2493.     case VA_FOURCC('N','V','1','2'):
  2494.         /* NV12 is native format for H.264 decoded surfaces */
  2495.         if (!render_state->interleaved_uv)
  2496.             goto operation_failed;
  2497.         get_image_nv12(obj_image, image_data, obj_surface, &rect);
  2498.         break;
  2499.     default:
  2500.     operation_failed:
  2501.         va_status = VA_STATUS_ERROR_OPERATION_FAILED;
  2502.         break;
  2503.     }
  2504.  
  2505.     i965_UnmapBuffer(ctx, obj_image->image.buf);
  2506.     return va_status;
  2507. }
  2508.  
  2509. VAStatus
  2510. i965_PutSurface(VADriverContextP ctx,
  2511.                 VASurfaceID surface,
  2512.                 void *draw, /* X Drawable */
  2513.                 short srcx,
  2514.                 short srcy,
  2515.                 unsigned short srcw,
  2516.                 unsigned short srch,
  2517.                 short destx,
  2518.                 short desty,
  2519.                 unsigned short destw,
  2520.                 unsigned short desth,
  2521.                 VARectangle *cliprects, /* client supplied clip list */
  2522.                 unsigned int number_cliprects, /* number of clip rects in the clip list */
  2523.                 unsigned int flags) /* de-interlacing flags */
  2524. {
  2525. #ifdef HAVE_VA_X11
  2526.     if (IS_VA_X11(ctx)) {
  2527.         VARectangle src_rect, dst_rect;
  2528.  
  2529.         src_rect.x      = srcx;
  2530.         src_rect.y      = srcy;
  2531.         src_rect.width  = srcw;
  2532.         src_rect.height = srch;
  2533.  
  2534.         dst_rect.x      = destx;
  2535.         dst_rect.y      = desty;
  2536.         dst_rect.width  = destw;
  2537.         dst_rect.height = desth;
  2538.  
  2539.         return i965_put_surface_dri(ctx, surface, draw, &src_rect, &dst_rect,
  2540.                                     cliprects, number_cliprects, flags);
  2541.     }
  2542. #endif
  2543.     return VA_STATUS_ERROR_UNIMPLEMENTED;
  2544. }
  2545.  
  2546. VAStatus
  2547. i965_Terminate(VADriverContextP ctx)
  2548. {
  2549.     struct i965_driver_data *i965 = i965_driver_data(ctx);
  2550.  
  2551.     if (i965->batch)
  2552.         intel_batchbuffer_free(i965->batch);
  2553.  
  2554.     _i965DestroyMutex(&i965->render_mutex);
  2555.  
  2556. #ifdef HAVE_VA_X11
  2557.     if (IS_VA_X11(ctx))
  2558.         i965_output_dri_terminate(ctx);
  2559. #endif
  2560.  
  2561. #ifdef HAVE_VA_WAYLAND
  2562.     if (IS_VA_WAYLAND(ctx))
  2563.         i965_output_wayland_terminate(ctx);
  2564. #endif
  2565.  
  2566.     if (i965_render_terminate(ctx) == False)
  2567.         return VA_STATUS_ERROR_UNKNOWN;
  2568.  
  2569.     if (i965_post_processing_terminate(ctx) == False)
  2570.         return VA_STATUS_ERROR_UNKNOWN;
  2571.  
  2572.     i965_display_attributes_terminate(ctx);
  2573.  
  2574.     i965_destroy_heap(&i965->buffer_heap, i965_destroy_buffer);
  2575.     i965_destroy_heap(&i965->image_heap, i965_destroy_image);
  2576.     i965_destroy_heap(&i965->subpic_heap, i965_destroy_subpic);
  2577.     i965_destroy_heap(&i965->surface_heap, i965_destroy_surface);
  2578.     i965_destroy_heap(&i965->context_heap, i965_destroy_context);
  2579.     i965_destroy_heap(&i965->config_heap, i965_destroy_config);
  2580.  
  2581.     if (intel_driver_terminate(ctx) == False)
  2582.         return VA_STATUS_ERROR_UNKNOWN;
  2583.  
  2584.     free(ctx->pDriverData);
  2585.     ctx->pDriverData = NULL;
  2586.  
  2587.     return VA_STATUS_SUCCESS;
  2588. }
  2589.  
  2590. static VAStatus
  2591. i965_BufferInfo(
  2592.     VADriverContextP ctx,       /* in */
  2593.     VABufferID buf_id,          /* in */
  2594.     VABufferType *type,         /* out */
  2595.     unsigned int *size,         /* out */
  2596.     unsigned int *num_elements  /* out */
  2597. )
  2598. {
  2599.     struct i965_driver_data *i965 = NULL;
  2600.     struct object_buffer *obj_buffer = NULL;
  2601.  
  2602.     i965 = i965_driver_data(ctx);
  2603.     obj_buffer = BUFFER(buf_id);
  2604.  
  2605.     *type = obj_buffer->type;
  2606.     *size = obj_buffer->size_element;
  2607.     *num_elements = obj_buffer->num_elements;
  2608.  
  2609.     return VA_STATUS_SUCCESS;
  2610. }
  2611.  
  2612. static VAStatus
  2613. i965_LockSurface(
  2614.     VADriverContextP ctx,           /* in */
  2615.     VASurfaceID surface,            /* in */
  2616.     unsigned int *fourcc,           /* out */
  2617.     unsigned int *luma_stride,      /* out */
  2618.     unsigned int *chroma_u_stride,  /* out */
  2619.     unsigned int *chroma_v_stride,  /* out */
  2620.     unsigned int *luma_offset,      /* out */
  2621.     unsigned int *chroma_u_offset,  /* out */
  2622.     unsigned int *chroma_v_offset,  /* out */
  2623.     unsigned int *buffer_name,      /* out */
  2624.     void **buffer                   /* out */
  2625. )
  2626. {
  2627.     VAStatus vaStatus = VA_STATUS_SUCCESS;
  2628.     struct i965_driver_data *i965 = i965_driver_data(ctx);
  2629.     struct object_surface *obj_surface = NULL;
  2630.     VAImage tmpImage;
  2631.  
  2632.     assert(fourcc);
  2633.     assert(luma_stride);
  2634.     assert(chroma_u_stride);
  2635.     assert(chroma_v_stride);
  2636.     assert(luma_offset);
  2637.     assert(chroma_u_offset);
  2638.     assert(chroma_v_offset);
  2639.     assert(buffer_name);
  2640.     assert(buffer);
  2641.  
  2642.     tmpImage.image_id = VA_INVALID_ID;
  2643.  
  2644.     obj_surface = SURFACE(surface);
  2645.     if (obj_surface == NULL) {
  2646.         // Surface is absent.
  2647.         vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER;
  2648.         goto error;
  2649.     }
  2650.  
  2651.     // There is no real lock; the surface is "locked" by deriving an image and mapping its buffer.
  2652.     if (obj_surface->locked_image_id != VA_INVALID_ID) {
  2653.         // Surface is locked already.
  2654.         vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER;
  2655.         goto error;
  2656.     }
  2657.  
  2658.     vaStatus = i965_DeriveImage(
  2659.         ctx,
  2660.         surface,
  2661.         &tmpImage);
  2662.     if (vaStatus != VA_STATUS_SUCCESS) {
  2663.         goto error;
  2664.     }
  2665.  
  2666.     obj_surface->locked_image_id = tmpImage.image_id;
  2667.  
  2668.     vaStatus = i965_MapBuffer(
  2669.         ctx,
  2670.         tmpImage.buf,
  2671.         buffer);
  2672.     if (vaStatus != VA_STATUS_SUCCESS) {
  2673.         goto error;
  2674.     }
  2675.  
  2676.     *fourcc = tmpImage.format.fourcc;
  2677.     *luma_offset = tmpImage.offsets[0];
  2678.     *luma_stride = tmpImage.pitches[0];
  2679.     *chroma_u_offset = tmpImage.offsets[1];
  2680.     *chroma_u_stride = tmpImage.pitches[1];
  2681.     *chroma_v_offset = tmpImage.offsets[2];
  2682.     *chroma_v_stride = tmpImage.pitches[2];
  2683.     *buffer_name = tmpImage.buf;
  2684.  
  2685. error:
  2686.     if (vaStatus != VA_STATUS_SUCCESS) {
  2687.         *buffer = NULL;   /* clear the caller's output pointer on failure */
  2688.     }
  2689.  
  2690.     return vaStatus;
  2691. }
  2692.  
  2693. static VAStatus
  2694. i965_UnlockSurface(
  2695.     VADriverContextP ctx,   /* in */
  2696.     VASurfaceID surface     /* in */
  2697. )
  2698. {
  2699.     VAStatus vaStatus = VA_STATUS_SUCCESS;
  2700.     struct i965_driver_data *i965 = i965_driver_data(ctx);
  2701.     struct object_image *locked_img = NULL;
  2702.     struct object_surface *obj_surface = NULL;
  2703.  
  2704.     obj_surface = SURFACE(surface);
  2705.  
  2706.     if (obj_surface == NULL) {
  2707.         vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER;   // Surface is absent
  2708.         return vaStatus;
  2709.     }
  2710.     if (obj_surface->locked_image_id == VA_INVALID_ID) {
  2711.         vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER;   // Surface is not locked
  2712.         return vaStatus;
  2713.     }
  2714.  
  2715.     locked_img = IMAGE(obj_surface->locked_image_id);
  2716.     if (locked_img == NULL || (locked_img->image.image_id == VA_INVALID_ID)) {
  2717.         // Work image was deallocated before i965_UnlockSurface()
  2718.         vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER;
  2719.         goto error;
  2720.     }
  2721.  
  2722.     vaStatus = i965_UnmapBuffer(
  2723.         ctx,
  2724.         locked_img->image.buf);
  2725.     if (vaStatus != VA_STATUS_SUCCESS) {
  2726.         goto error;
  2727.     }
  2728.  
  2729.     vaStatus = i965_DestroyImage(
  2730.         ctx,
  2731.         locked_img->image.image_id);
  2732.     if (vaStatus != VA_STATUS_SUCCESS) {
  2733.         goto error;
  2734.     }
  2735.  
  2736.     locked_img->image.image_id = VA_INVALID_ID;
  2737.  
  2738.  error:
  2739.     obj_surface->locked_image_id = VA_INVALID_ID;
  2740.  
  2741.     return vaStatus;
  2742. }
  2743.  
  2744.  
  2745.  
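/*
 * Driver entry point.  VA_DRIVER_INIT_FUNC is the symbol libva resolves when
 * it loads the driver; it fills in the vtable, allocates the per-driver data
 * and initialises the object heaps used for configs, contexts, surfaces,
 * buffers, images and subpictures.
 */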
  2746. VAStatus DLL_EXPORT
  2747. VA_DRIVER_INIT_FUNC(VADriverContextP ctx);
  2748.  
  2749. VAStatus
  2750. VA_DRIVER_INIT_FUNC(VADriverContextP ctx)
  2751. {
  2752.     struct VADriverVTable * const vtable = ctx->vtable;
  2753.     struct i965_driver_data *i965;
  2754.     int result;
  2755.  
  2756.     printf("%s context %p\n", __FUNCTION__, ctx);
  2757.    
  2758.     ctx->version_major = VA_MAJOR_VERSION;
  2759.     ctx->version_minor = VA_MINOR_VERSION;
  2760.     ctx->max_profiles = I965_MAX_PROFILES;
  2761.     ctx->max_entrypoints = I965_MAX_ENTRYPOINTS;
  2762.     ctx->max_attributes = I965_MAX_CONFIG_ATTRIBUTES;
  2763.     ctx->max_image_formats = I965_MAX_IMAGE_FORMATS;
  2764.     ctx->max_subpic_formats = I965_MAX_SUBPIC_FORMATS;
  2765.     ctx->max_display_attributes = 1 + ARRAY_ELEMS(i965_display_attributes);
  2766.  
  2767.     vtable->vaTerminate = i965_Terminate;
  2768.     vtable->vaQueryConfigEntrypoints = i965_QueryConfigEntrypoints;
  2769.     vtable->vaQueryConfigProfiles = i965_QueryConfigProfiles;
  2771.     vtable->vaQueryConfigAttributes = i965_QueryConfigAttributes;
  2772.     vtable->vaCreateConfig = i965_CreateConfig;
  2773.     vtable->vaDestroyConfig = i965_DestroyConfig;
  2774.     vtable->vaGetConfigAttributes = i965_GetConfigAttributes;
  2775.     vtable->vaCreateSurfaces = i965_CreateSurfaces;
  2776.     vtable->vaDestroySurfaces = i965_DestroySurfaces;
  2777.     vtable->vaCreateContext = i965_CreateContext;
  2778.     vtable->vaDestroyContext = i965_DestroyContext;
  2779.     vtable->vaCreateBuffer = i965_CreateBuffer;
  2780.     vtable->vaBufferSetNumElements = i965_BufferSetNumElements;
  2781.     vtable->vaMapBuffer = i965_MapBuffer;
  2782.     vtable->vaUnmapBuffer = i965_UnmapBuffer;
  2783.     vtable->vaDestroyBuffer = i965_DestroyBuffer;
  2784.     vtable->vaBeginPicture = i965_BeginPicture;
  2785.     vtable->vaRenderPicture = i965_RenderPicture;
  2786.     vtable->vaEndPicture = i965_EndPicture;
  2787.     vtable->vaSyncSurface = i965_SyncSurface;
  2788.     vtable->vaQuerySurfaceStatus = i965_QuerySurfaceStatus;
  2789.     vtable->vaPutSurface = i965_PutSurface;
  2790.     vtable->vaQueryImageFormats = i965_QueryImageFormats;
  2791.     vtable->vaCreateImage = i965_CreateImage;
  2792.     vtable->vaDeriveImage = i965_DeriveImage;
  2793.     vtable->vaDestroyImage = i965_DestroyImage;
  2794.     vtable->vaSetImagePalette = i965_SetImagePalette;
  2795.     vtable->vaGetImage = i965_GetImage;
  2796.     vtable->vaPutImage = i965_PutImage;
  2797.     vtable->vaQuerySubpictureFormats = i965_QuerySubpictureFormats;
  2798.     vtable->vaCreateSubpicture = i965_CreateSubpicture;
  2799.     vtable->vaDestroySubpicture = i965_DestroySubpicture;
  2800.     vtable->vaSetSubpictureImage = i965_SetSubpictureImage;
  2801.     vtable->vaSetSubpictureChromakey = i965_SetSubpictureChromakey;
  2802.     vtable->vaSetSubpictureGlobalAlpha = i965_SetSubpictureGlobalAlpha;
  2803.     vtable->vaAssociateSubpicture = i965_AssociateSubpicture;
  2804.     vtable->vaDeassociateSubpicture = i965_DeassociateSubpicture;
  2805.     vtable->vaQueryDisplayAttributes = i965_QueryDisplayAttributes;
  2806.     vtable->vaGetDisplayAttributes = i965_GetDisplayAttributes;
  2807.     vtable->vaSetDisplayAttributes = i965_SetDisplayAttributes;
  2808.     vtable->vaBufferInfo = i965_BufferInfo;
  2809.     vtable->vaLockSurface = i965_LockSurface;
  2810.     vtable->vaUnlockSurface = i965_UnlockSurface;
  2811.     //    vtable->vaDbgCopySurfaceToBuffer = i965_DbgCopySurfaceToBuffer;
  2812.  
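            /* Allocate the driver-private data hung off the VA context; an allocation
               failure is only caught by the assert below. */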
  2813.     i965 = (struct i965_driver_data *)calloc(1, sizeof(*i965));
  2814.     assert(i965);
  2815.     ctx->pDriverData = (void *)i965;
  2816.  
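            /* Initialize the object heaps; each object type allocates its IDs from
               its own offset range (CONFIG_ID_OFFSET, CONTEXT_ID_OFFSET, ...). */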
  2817.     result = object_heap_init(&i965->config_heap,
  2818.                               sizeof(struct object_config),
  2819.                               CONFIG_ID_OFFSET);
  2820.     assert(result == 0);
  2821.  
  2822.     result = object_heap_init(&i965->context_heap,
  2823.                               sizeof(struct object_context),
  2824.                               CONTEXT_ID_OFFSET);
  2825.     assert(result == 0);
  2826.  
  2827.     result = object_heap_init(&i965->surface_heap,
  2828.                               sizeof(struct object_surface),
  2829.                               SURFACE_ID_OFFSET);
  2830.     assert(result == 0);
  2831.  
  2832.     result = object_heap_init(&i965->buffer_heap,
  2833.                               sizeof(struct object_buffer),
  2834.                               BUFFER_ID_OFFSET);
  2835.     assert(result == 0);
  2836.  
  2837.     result = object_heap_init(&i965->image_heap,
  2838.                               sizeof(struct object_image),
  2839.                               IMAGE_ID_OFFSET);
  2840.     assert(result == 0);
  2841.  
  2842.     result = object_heap_init(&i965->subpic_heap,
  2843.                               sizeof(struct object_subpic),
  2844.                               SUBPIC_ID_OFFSET);
  2845.     assert(result == 0);
  2846.  
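            /* Build the vendor string exposed to applications through ctx->str_vendor
               (returned by vaQueryVendorString). */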
  2847.     sprintf(i965->va_vendor, "%s %s driver - %d.%d.%d",
  2848.             INTEL_STR_DRIVER_VENDOR,
  2849.             INTEL_STR_DRIVER_NAME,
  2850.             INTEL_DRIVER_MAJOR_VERSION,
  2851.             INTEL_DRIVER_MINOR_VERSION,
  2852.             INTEL_DRIVER_MICRO_VERSION);
  2853.  
  2854.     if (INTEL_DRIVER_PRE_VERSION > 0) {
  2855.         const int len = strlen(i965->va_vendor);
  2856.         sprintf(&i965->va_vendor[len], ".pre%d", INTEL_DRIVER_PRE_VERSION);
  2857.     }
  2858.     ctx->str_vendor = i965->va_vendor;
  2859.  
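            /* Remaining device and subsystem initialization is done in i965_Init(). */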
  2860.     return i965_Init(ctx);
  2861. }
  2862.