/**************************************************************************
 *
 * Copyright 2010 Thomas Balling Sørensen.
 * Copyright 2011 Christian König.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sub license, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial portions
 * of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
 * IN NO EVENT SHALL VMWARE AND/OR ITS SUPPLIERS BE LIABLE FOR
 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 **************************************************************************/

#include <assert.h>

#include "pipe/p_state.h"

#include "util/u_memory.h"
#include "util/u_debug.h"
#include "util/u_rect.h"
#include "util/u_surface.h"
#include "util/u_video.h"
#include "vl/vl_defines.h"

#include "vdpau_private.h"

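/* In-memory layout conversions that GetBitsYCbCr can apply while copying,
 * used when the application-requested format differs from the buffer format. */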
enum getbits_conversion {
   CONVERSION_NONE,
   CONVERSION_NV12_TO_YV12,
   CONVERSION_YV12_TO_NV12,
   CONVERSION_SWAP_YUYV_UYVY,
};

/**
 * Create a VdpVideoSurface.
 */
VdpStatus
vlVdpVideoSurfaceCreate(VdpDevice device, VdpChromaType chroma_type,
                        uint32_t width, uint32_t height,
                        VdpVideoSurface *surface)
{
   struct pipe_context *pipe;
   vlVdpSurface *p_surf;
   VdpStatus ret;

   if (!(width && height)) {
      ret = VDP_STATUS_INVALID_SIZE;
      goto inv_size;
   }

   p_surf = CALLOC(1, sizeof(vlVdpSurface));
   if (!p_surf) {
      ret = VDP_STATUS_RESOURCES;
      goto no_res;
   }

   vlVdpDevice *dev = vlGetDataHTAB(device);
   if (!dev) {
      ret = VDP_STATUS_INVALID_HANDLE;
      goto inv_device;
   }

   DeviceReference(&p_surf->device, dev);
   pipe = dev->context;

   pipe_mutex_lock(dev->mutex);
   memset(&p_surf->templat, 0, sizeof(p_surf->templat));
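   /* fill the template with the driver's preferred buffer format and
    * interlacing, queried through the screen's get_video_param() */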
   p_surf->templat.buffer_format = pipe->screen->get_video_param
   (
      pipe->screen,
      PIPE_VIDEO_PROFILE_UNKNOWN,
      PIPE_VIDEO_ENTRYPOINT_BITSTREAM,
      PIPE_VIDEO_CAP_PREFERED_FORMAT
   );
   p_surf->templat.chroma_format = ChromaToPipe(chroma_type);
   p_surf->templat.width = width;
   p_surf->templat.height = height;
   p_surf->templat.interlaced = pipe->screen->get_video_param
   (
      pipe->screen,
      PIPE_VIDEO_PROFILE_UNKNOWN,
      PIPE_VIDEO_ENTRYPOINT_BITSTREAM,
      PIPE_VIDEO_CAP_PREFERS_INTERLACED
   );
   if (p_surf->templat.buffer_format != PIPE_FORMAT_NONE)
      p_surf->video_buffer = pipe->create_video_buffer(pipe, &p_surf->templat);

   /* do not mandate early allocation of a video buffer */
   vlVdpVideoSurfaceClear(p_surf);
   pipe_mutex_unlock(dev->mutex);

   *surface = vlAddDataHTAB(p_surf);
   if (*surface == 0) {
      ret = VDP_STATUS_ERROR;
      goto no_handle;
   }

   return VDP_STATUS_OK;

no_handle:
   /* the video buffer may not have been allocated (or its creation may have
    * failed), so only destroy it if it actually exists */
   if (p_surf->video_buffer)
      p_surf->video_buffer->destroy(p_surf->video_buffer);

inv_device:
   DeviceReference(&p_surf->device, NULL);
   FREE(p_surf);

no_res:
inv_size:
   return ret;
}

/**
 * Destroy a VdpVideoSurface.
 */
VdpStatus
vlVdpVideoSurfaceDestroy(VdpVideoSurface surface)
{
   vlVdpSurface *p_surf;

   p_surf = (vlVdpSurface *)vlGetDataHTAB((vlHandle)surface);
   if (!p_surf)
      return VDP_STATUS_INVALID_HANDLE;

   pipe_mutex_lock(p_surf->device->mutex);
   if (p_surf->video_buffer)
      p_surf->video_buffer->destroy(p_surf->video_buffer);
   pipe_mutex_unlock(p_surf->device->mutex);

   vlRemoveDataHTAB(surface);
   DeviceReference(&p_surf->device, NULL);
   FREE(p_surf);

   return VDP_STATUS_OK;
}

/**
 * Retrieve the parameters used to create a VdpVideoSurface.
 */
VdpStatus
vlVdpVideoSurfaceGetParameters(VdpVideoSurface surface,
                               VdpChromaType *chroma_type,
                               uint32_t *width, uint32_t *height)
{
   if (!(width && height && chroma_type))
      return VDP_STATUS_INVALID_POINTER;

   vlVdpSurface *p_surf = vlGetDataHTAB(surface);
   if (!p_surf)
      return VDP_STATUS_INVALID_HANDLE;

   if (p_surf->video_buffer) {
      *width = p_surf->video_buffer->width;
      *height = p_surf->video_buffer->height;
      *chroma_type = PipeToChroma(p_surf->video_buffer->chroma_format);
   } else {
      *width = p_surf->templat.width;
      *height = p_surf->templat.height;
      *chroma_type = PipeToChroma(p_surf->templat.chroma_format);
   }

   return VDP_STATUS_OK;
}

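/**
 * Compute the dimensions of a single plane of the surface: chroma planes
 * are subsampled according to the chroma format, and interlaced buffers
 * store each field at half height.
 */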
static void
vlVdpVideoSurfaceSize(vlVdpSurface *p_surf, int component,
                      unsigned *width, unsigned *height)
{
   *width = p_surf->templat.width;
   *height = p_surf->templat.height;

   if (component > 0) {
      if (p_surf->templat.chroma_format == PIPE_VIDEO_CHROMA_FORMAT_420) {
         *width /= 2;
         *height /= 2;
      } else if (p_surf->templat.chroma_format == PIPE_VIDEO_CHROMA_FORMAT_422) {
         *width /= 2;
      }
   }
   if (p_surf->templat.interlaced)
      *height /= 2;
}

/**
 * Copy image data from a VdpVideoSurface to application memory in a specified
 * YCbCr format.
 */
VdpStatus
vlVdpVideoSurfaceGetBitsYCbCr(VdpVideoSurface surface,
                              VdpYCbCrFormat destination_ycbcr_format,
                              void *const *destination_data,
                              uint32_t const *destination_pitches)
{
   vlVdpSurface *vlsurface;
   struct pipe_context *pipe;
   enum pipe_format format, buffer_format;
   struct pipe_sampler_view **sampler_views;
   enum getbits_conversion conversion = CONVERSION_NONE;
   unsigned i, j;

   vlsurface = vlGetDataHTAB(surface);
   if (!vlsurface)
      return VDP_STATUS_INVALID_HANDLE;

   pipe = vlsurface->device->context;
   if (!pipe)
      return VDP_STATUS_INVALID_HANDLE;

   format = FormatYCBCRToPipe(destination_ycbcr_format);
   if (format == PIPE_FORMAT_NONE)
      return VDP_STATUS_INVALID_Y_CB_CR_FORMAT;

   if (vlsurface->video_buffer == NULL)
      return VDP_STATUS_INVALID_VALUE;

   buffer_format = vlsurface->video_buffer->buffer_format;
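   /* if the requested format differs from the buffer format, check whether
    * one of the supported on-the-fly conversions applies */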
   if (format != buffer_format) {
      if (format == PIPE_FORMAT_YV12 && buffer_format == PIPE_FORMAT_NV12)
         conversion = CONVERSION_NV12_TO_YV12;
      else if (format == PIPE_FORMAT_NV12 && buffer_format == PIPE_FORMAT_YV12)
         conversion = CONVERSION_YV12_TO_NV12;
      else if ((format == PIPE_FORMAT_YUYV && buffer_format == PIPE_FORMAT_UYVY) ||
               (format == PIPE_FORMAT_UYVY && buffer_format == PIPE_FORMAT_YUYV))
         conversion = CONVERSION_SWAP_YUYV_UYVY;
      else
         return VDP_STATUS_NO_IMPLEMENTATION;
   }

   pipe_mutex_lock(vlsurface->device->mutex);
   sampler_views = vlsurface->video_buffer->get_sampler_view_planes(vlsurface->video_buffer);
   if (!sampler_views) {
      pipe_mutex_unlock(vlsurface->device->mutex);
      return VDP_STATUS_RESOURCES;
   }

   for (i = 0; i < 3; ++i) {
      unsigned width, height;
      struct pipe_sampler_view *sv = sampler_views[i];
      if (!sv) continue;

      vlVdpVideoSurfaceSize(vlsurface, i, &width, &height);

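      /* interlaced buffers expose each field as a separate array layer,
       * so copy every layer of the plane texture */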
      for (j = 0; j < sv->texture->array_size; ++j) {
         struct pipe_box box = {
            0, 0, j,
            width, height, 1
         };
         struct pipe_transfer *transfer;
         uint8_t *map;

         map = pipe->transfer_map(pipe, sv->texture, 0,
                                  PIPE_TRANSFER_READ, &box, &transfer);
         if (!map) {
            pipe_mutex_unlock(vlsurface->device->mutex);
            return VDP_STATUS_RESOURCES;
         }

         if (conversion == CONVERSION_NV12_TO_YV12 && i == 1) {
            u_copy_nv12_to_yv12(destination_data, destination_pitches,
                                i, j, transfer->stride, sv->texture->array_size,
                                map, box.width, box.height);
         } else if (conversion == CONVERSION_YV12_TO_NV12 && i > 0) {
            u_copy_yv12_to_nv12(destination_data, destination_pitches,
                                i, j, transfer->stride, sv->texture->array_size,
                                map, box.width, box.height);
         } else if (conversion == CONVERSION_SWAP_YUYV_UYVY) {
            u_copy_swap422_packed(destination_data, destination_pitches,
                                  i, j, transfer->stride, sv->texture->array_size,
                                  map, box.width, box.height);
         } else {
            util_copy_rect(destination_data[i] + destination_pitches[i] * j, sv->texture->format,
                           destination_pitches[i] * sv->texture->array_size, 0, 0,
                           box.width, box.height, map, transfer->stride, 0, 0);
         }

         pipe_transfer_unmap(pipe, transfer);
      }
   }
   pipe_mutex_unlock(vlsurface->device->mutex);

   return VDP_STATUS_OK;
}

/**
 * Copy image data from application memory in a specific YCbCr format to
 * a VdpVideoSurface.
 */
VdpStatus
vlVdpVideoSurfacePutBitsYCbCr(VdpVideoSurface surface,
                              VdpYCbCrFormat source_ycbcr_format,
                              void const *const *source_data,
                              uint32_t const *source_pitches)
{
   enum pipe_format pformat = FormatYCBCRToPipe(source_ycbcr_format);
   struct pipe_context *pipe;
   struct pipe_sampler_view **sampler_views;
   unsigned i, j;

   vlVdpSurface *p_surf = vlGetDataHTAB(surface);
   if (!p_surf)
      return VDP_STATUS_INVALID_HANDLE;

   pipe = p_surf->device->context;
   if (!pipe)
      return VDP_STATUS_INVALID_HANDLE;

   pipe_mutex_lock(p_surf->device->mutex);
   if (p_surf->video_buffer == NULL || pformat != p_surf->video_buffer->buffer_format) {

      /* destroy the old one */
      if (p_surf->video_buffer)
         p_surf->video_buffer->destroy(p_surf->video_buffer);

      /* adjust the template parameters */
      p_surf->templat.buffer_format = pformat;

      /* and try to create the video buffer with the new format */
      p_surf->video_buffer = pipe->create_video_buffer(pipe, &p_surf->templat);

      /* still no luck? OK, forget it, we don't support it */
      if (!p_surf->video_buffer) {
         pipe_mutex_unlock(p_surf->device->mutex);
         return VDP_STATUS_NO_IMPLEMENTATION;
      }
      vlVdpVideoSurfaceClear(p_surf);
   }

   sampler_views = p_surf->video_buffer->get_sampler_view_planes(p_surf->video_buffer);
   if (!sampler_views) {
      pipe_mutex_unlock(p_surf->device->mutex);
      return VDP_STATUS_RESOURCES;
   }

   for (i = 0; i < 3; ++i) {
      unsigned width, height;
      struct pipe_sampler_view *sv = sampler_views[i];
      if (!sv || !source_pitches[i]) continue;

      vlVdpVideoSurfaceSize(p_surf, i, &width, &height);

      for (j = 0; j < sv->texture->array_size; ++j) {
         struct pipe_box dst_box = {
            0, 0, j,
            width, height, 1
         };

         pipe->transfer_inline_write(pipe, sv->texture, 0,
                                     PIPE_TRANSFER_WRITE, &dst_box,
                                     source_data[i] + source_pitches[i] * j,
                                     source_pitches[i] * sv->texture->array_size,
                                     0);
      }
   }
   pipe_mutex_unlock(p_surf->device->mutex);

   return VDP_STATUS_OK;
}

/**
 * Helper function to initially clear the VideoSurface after (re-)creation
 */
void
vlVdpVideoSurfaceClear(vlVdpSurface *vlsurf)
{
   struct pipe_context *pipe = vlsurf->device->context;
   struct pipe_surface **surfaces;
   unsigned i;

   if (!vlsurf->video_buffer)
      return;

   surfaces = vlsurf->video_buffer->get_surfaces(vlsurf->video_buffer);
   for (i = 0; i < VL_MAX_SURFACES; ++i) {
      union pipe_color_union c = {};

      if (!surfaces[i])
         continue;

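      /* clear the luma surface(s) to black; the remaining (chroma) surfaces
       * are cleared to 0.5 so they come out as neutral gray */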
      if (i > !!vlsurf->templat.interlaced)
         c.f[0] = c.f[1] = c.f[2] = c.f[3] = 0.5f;

      pipe->clear_render_target(pipe, surfaces[i], &c, 0, 0,
                                surfaces[i]->width, surfaces[i]->height);
   }
   pipe->flush(pipe, NULL, 0);
}

/**
 * Interop to mesa state tracker
 */
struct pipe_video_buffer *vlVdpVideoSurfaceGallium(VdpVideoSurface surface)
{
   vlVdpSurface *p_surf = vlGetDataHTAB(surface);
   if (!p_surf)
      return NULL;

   pipe_mutex_lock(p_surf->device->mutex);
   if (p_surf->video_buffer == NULL) {
      struct pipe_context *pipe = p_surf->device->context;

      /* try to create a video buffer if we don't already have one */
      p_surf->video_buffer = pipe->create_video_buffer(pipe, &p_surf->templat);
   }
   pipe_mutex_unlock(p_surf->device->mutex);

   return p_surf->video_buffer;
}