/**********************************************************
 * Copyright 2009-2011 VMware, Inc. All rights reserved.
 *
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
 * files (the "Software"), to deal in the Software without
 * restriction, including without limitation the rights to use, copy,
 * modify, merge, publish, distribute, sublicense, and/or sell copies
 * of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be
 * included in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
 * BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
 * ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
 * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 *
 *********************************************************
 * Authors:
 * Zack Rusin <zackr-at-vmware-dot-com>
 * Thomas Hellstrom <thellstrom-at-vmware-dot-com>
 */
#include "xa_context.h"
#include "xa_priv.h"
#include "cso_cache/cso_context.h"
#include "util/u_inlines.h"
#include "util/u_rect.h"
#include "util/u_surface.h"
#include "pipe/p_context.h"

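/*
 * Return the tracker's default rendering context.
 */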
XA_EXPORT struct xa_context *
xa_context_default(struct xa_tracker *xa)
{
    return xa->default_ctx;
}

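/*
 * Create a rendering context: a gallium pipe context, a cso context
 * and the XA shader set, with the renderer state initialized.
 */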
XA_EXPORT struct xa_context *
xa_context_create(struct xa_tracker *xa)
{
    struct xa_context *ctx = calloc(1, sizeof(*ctx));

    if (!ctx)
        return NULL;

    ctx->xa = xa;
    ctx->pipe = xa->screen->context_create(xa->screen, NULL);
    ctx->cso = cso_create_context(ctx->pipe);
    ctx->shaders = xa_shaders_create(ctx);
    renderer_init_state(ctx);

    return ctx;
}

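/*
 * Destroy a context: release the constant buffers, shaders, bound
 * sampler views, the cso context and the pipe context.
 */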
XA_EXPORT void
xa_context_destroy(struct xa_context *r)
{
    struct pipe_resource **vsbuf = &r->vs_const_buffer;
    struct pipe_resource **fsbuf = &r->fs_const_buffer;

    if (*vsbuf)
        pipe_resource_reference(vsbuf, NULL);

    if (*fsbuf)
        pipe_resource_reference(fsbuf, NULL);

    if (r->shaders) {
        xa_shaders_destroy(r->shaders);
        r->shaders = NULL;
    }

    xa_ctx_sampler_views_destroy(r);

    if (r->cso) {
        cso_release_all(r->cso);
        cso_destroy_context(r->cso);
        r->cso = NULL;
    }

    r->pipe->destroy(r->pipe);
}

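/*
 * Copy pixel data between a system-memory buffer and the surface
 * texture, one box at a time. A nonzero to_surface uploads from data
 * to the surface; zero reads the surface back into data.
 */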
XA_EXPORT int
xa_surface_dma(struct xa_context *ctx,
               struct xa_surface *srf,
               void *data,
               unsigned int pitch,
               int to_surface, struct xa_box *boxes, unsigned int num_boxes)
{
    struct pipe_transfer *transfer;
    void *map;
    int w, h, i;
    enum pipe_transfer_usage transfer_direction;
    struct pipe_context *pipe = ctx->pipe;

    transfer_direction = (to_surface ? PIPE_TRANSFER_WRITE :
                          PIPE_TRANSFER_READ);

    for (i = 0; i < num_boxes; ++i, ++boxes) {
        w = boxes->x2 - boxes->x1;
        h = boxes->y2 - boxes->y1;

        map = pipe_transfer_map(pipe, srf->tex, 0, 0,
                                transfer_direction, boxes->x1, boxes->y1,
                                w, h, &transfer);
        if (!map)
            return -XA_ERR_NORES;

        if (to_surface) {
            util_copy_rect(map, srf->tex->format, transfer->stride,
                           0, 0, w, h, data, pitch, boxes->x1, boxes->y1);
        } else {
            util_copy_rect(data, srf->tex->format, pitch,
                           boxes->x1, boxes->y1, w, h, map, transfer->stride, 0,
                           0);
        }
        pipe->transfer_unmap(pipe, transfer);
        if (to_surface)
            pipe->flush(pipe, &ctx->last_fence, 0);
    }
    return XA_ERR_NONE;
}

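/*
 * Map the whole surface for CPU access. usage must contain XA_MAP_READ
 * and/or XA_MAP_WRITE, and only one mapping may exist at a time.
 */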
XA_EXPORT void *
xa_surface_map(struct xa_context *ctx,
               struct xa_surface *srf, unsigned int usage)
{
    void *map;
    unsigned int transfer_direction = 0;
    struct pipe_context *pipe = ctx->pipe;

    /*
     * A surface may only have a single map.
     */
    if (srf->transfer)
        return NULL;

    if (usage & XA_MAP_READ)
        transfer_direction = PIPE_TRANSFER_READ;
    if (usage & XA_MAP_WRITE)
        transfer_direction = PIPE_TRANSFER_WRITE;

    if (!transfer_direction)
        return NULL;

    map = pipe_transfer_map(pipe, srf->tex, 0, 0,
                            transfer_direction, 0, 0,
                            srf->tex->width0, srf->tex->height0,
                            &srf->transfer);
    if (!map)
        return NULL;

    srf->mapping_pipe = pipe;
    return map;
}

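/*
 * Release the mapping previously set up by xa_surface_map, if any.
 */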
XA_EXPORT void
xa_surface_unmap(struct xa_surface *srf)
{
    if (srf->transfer) {
        struct pipe_context *pipe = srf->mapping_pipe;

        pipe->transfer_unmap(pipe, srf->transfer);
        srf->transfer = NULL;
    }
}

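/*
 * Create a render-target surface for dst and cache it in the context.
 * Fails if a surface is already cached or if the texture format cannot
 * be used as a render target.
 */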
int
xa_ctx_srf_create(struct xa_context *ctx, struct xa_surface *dst)
{
    struct pipe_screen *screen = ctx->pipe->screen;
    struct pipe_surface srf_templ;

    if (ctx->srf)
        return -XA_ERR_INVAL;

    if (!screen->is_format_supported(screen, dst->tex->format,
                                     PIPE_TEXTURE_2D, 0,
                                     PIPE_BIND_RENDER_TARGET))
        return -XA_ERR_INVAL;

    u_surface_default_template(&srf_templ, dst->tex);
    ctx->srf = ctx->pipe->create_surface(ctx->pipe, dst->tex, &srf_templ);
    if (!ctx->srf)
        return -XA_ERR_NORES;

    return XA_ERR_NONE;
}

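/*
 * Release the context's cached destination surface.
 */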
void
xa_ctx_srf_destroy(struct xa_context *ctx)
{
    pipe_surface_reference(&ctx->srf, NULL);
}

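/*
 * Prepare a copy from src to dst. Copies between identical formats use
 * a plain resource copy; differing formats are routed through the
 * renderer.
 */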
XA_EXPORT int
xa_copy_prepare(struct xa_context *ctx,
                struct xa_surface *dst, struct xa_surface *src)
{
    if (src == dst || ctx->srf != NULL)
        return -XA_ERR_INVAL;

    if (src->tex->format != dst->tex->format) {
        int ret = xa_ctx_srf_create(ctx, dst);
        if (ret != XA_ERR_NONE)
            return ret;
        renderer_copy_prepare(ctx, ctx->srf, src->tex,
                              src->fdesc.xa_format,
                              dst->fdesc.xa_format);
        ctx->simple_copy = 0;
    } else
        ctx->simple_copy = 1;

    ctx->src = src;
    ctx->dst = dst;
    xa_ctx_srf_destroy(ctx);

    return 0;
}

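/*
 * Copy a width x height rectangle from (sx, sy) in the source to
 * (dx, dy) in the destination, using the path chosen by
 * xa_copy_prepare.
 */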
XA_EXPORT void
xa_copy(struct xa_context *ctx,
        int dx, int dy, int sx, int sy, int width, int height)
{
    struct pipe_box src_box;

    if (ctx->simple_copy) {
        u_box_2d(sx, sy, width, height, &src_box);
        ctx->pipe->resource_copy_region(ctx->pipe,
                                        ctx->dst->tex, 0, dx, dy, 0,
                                        ctx->src->tex,
                                        0, &src_box);
    } else
        renderer_copy(ctx, dx, dy, sx, sy, width, height,
                      (float) ctx->src->tex->width0,
                      (float) ctx->src->tex->height0);
}

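/*
 * Finish a copy sequence and flush the pipe context.
 */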
XA_EXPORT void
xa_copy_done(struct xa_context *ctx)
{
    if (!ctx->simple_copy) {
        renderer_draw_flush(ctx);
        ctx->pipe->flush(ctx->pipe, &ctx->last_fence, 0);
    } else
        ctx->pipe->flush(ctx->pipe, &ctx->last_fence, 0);
}

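/*
 * Bind a blend state that writes all channels with blending disabled
 * (ONE / ZERO factors).
 */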
static void
bind_solid_blend_state(struct xa_context *ctx)
{
    struct pipe_blend_state blend;

    memset(&blend, 0, sizeof(struct pipe_blend_state));
    blend.rt[0].blend_enable = 0;
    blend.rt[0].colormask = PIPE_MASK_RGBA;

    blend.rt[0].rgb_src_factor   = PIPE_BLENDFACTOR_ONE;
    blend.rt[0].alpha_src_factor = PIPE_BLENDFACTOR_ONE;
    blend.rt[0].rgb_dst_factor   = PIPE_BLENDFACTOR_ZERO;
    blend.rt[0].alpha_dst_factor = PIPE_BLENDFACTOR_ZERO;

    cso_set_blend(ctx->cso, &blend);
}

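/*
 * Set up the destination surface, blend state and solid-fill shaders
 * for a series of solid fills with color fg.
 */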
XA_EXPORT int
xa_solid_prepare(struct xa_context *ctx, struct xa_surface *dst,
                 uint32_t fg)
{
    unsigned vs_traits, fs_traits;
    struct xa_shader shader;
    int width, height;
    int ret;

    ret = xa_ctx_srf_create(ctx, dst);
    if (ret != XA_ERR_NONE)
        return ret;

    if (ctx->srf->format == PIPE_FORMAT_L8_UNORM)
        xa_pixel_to_float4_a8(fg, ctx->solid_color);
    else
        xa_pixel_to_float4(fg, ctx->solid_color);
    ctx->has_solid_color = 1;

    ctx->dst = dst;
    width = ctx->srf->width;
    height = ctx->srf->height;

#if 0
    debug_printf("Color Pixel=(%d, %d, %d, %d), RGBA=(%f, %f, %f, %f)\n",
                 (fg >> 24) & 0xff, (fg >> 16) & 0xff,
                 (fg >> 8) & 0xff, (fg >> 0) & 0xff,
                 ctx->solid_color[0], ctx->solid_color[1],
                 ctx->solid_color[2], ctx->solid_color[3]);
#endif

    vs_traits = VS_SOLID_FILL;
    fs_traits = FS_SOLID_FILL;

    renderer_bind_destination(ctx, ctx->srf, width, height);
    bind_solid_blend_state(ctx);
    cso_set_samplers(ctx->cso, PIPE_SHADER_FRAGMENT, 0, NULL);
    cso_set_sampler_views(ctx->cso, PIPE_SHADER_FRAGMENT, 0, NULL);

    shader = xa_shaders_get(ctx->shaders, vs_traits, fs_traits);
    cso_set_vertex_shader_handle(ctx->cso, shader.vs);
    cso_set_fragment_shader_handle(ctx->cso, shader.fs);

    renderer_begin_solid(ctx);

    xa_ctx_srf_destroy(ctx);
    return XA_ERR_NONE;
}

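/*
 * Fill a width x height rectangle at (x, y) with the prepared color.
 */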
XA_EXPORT void
xa_solid(struct xa_context *ctx, int x, int y, int width, int height)
{
    renderer_solid(ctx, x, y, x + width, y + height, ctx->solid_color);
}

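/*
 * Flush the queued solid fills and reset the solid-fill state.
 */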
XA_EXPORT void
xa_solid_done(struct xa_context *ctx)
{
    renderer_draw_flush(ctx);
    ctx->pipe->flush(ctx->pipe, &ctx->last_fence, 0);

    ctx->comp = NULL;
    ctx->has_solid_color = FALSE;
    ctx->num_bound_samplers = 0;
}

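/*
 * Return a fence referencing the last flush on this context, or NULL
 * on allocation failure.
 */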
XA_EXPORT struct xa_fence *
xa_fence_get(struct xa_context *ctx)
{
    struct xa_fence *fence = calloc(1, sizeof(*fence));
    struct pipe_screen *screen = ctx->xa->screen;

    if (!fence)
        return NULL;

    fence->xa = ctx->xa;

    if (ctx->last_fence == NULL)
        fence->pipe_fence = NULL;
    else
        screen->fence_reference(screen, &fence->pipe_fence, ctx->last_fence);

    return fence;
}

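/*
 * Wait for a fence to signal, up to timeout. Returns -XA_ERR_BUSY if
 * the wait times out, XA_ERR_NONE otherwise.
 */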
XA_EXPORT int
xa_fence_wait(struct xa_fence *fence, uint64_t timeout)
{
    if (!fence)
        return XA_ERR_NONE;

    if (fence->pipe_fence) {
        struct pipe_screen *screen = fence->xa->screen;
        boolean timed_out;

        timed_out = !screen->fence_finish(screen, fence->pipe_fence, timeout);
        if (timed_out)
            return -XA_ERR_BUSY;

        screen->fence_reference(screen, &fence->pipe_fence, NULL);
    }
    return XA_ERR_NONE;
}

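/*
 * Drop the pipe fence reference, if any, and free the fence object.
 */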
XA_EXPORT void
xa_fence_destroy(struct xa_fence *fence)
{
    if (!fence)
        return;

    if (fence->pipe_fence) {
        struct pipe_screen *screen = fence->xa->screen;

        screen->fence_reference(screen, &fence->pipe_fence, NULL);
    }

    free(fence);
}

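/*
 * Unreference all sampler views currently bound through the context.
 */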
void
xa_ctx_sampler_views_destroy(struct xa_context *ctx)
{
    int i;

    for (i = 0; i < ctx->num_bound_samplers; ++i)
        pipe_sampler_view_reference(&ctx->bound_sampler_views[i], NULL);
    ctx->num_bound_samplers = 0;
}
  397.