  1. /**************************************************************************
  2.  *
  3.  * Copyright 2007 VMware, Inc.
  4.  * All Rights Reserved.
  5.  *
  6.  * Permission is hereby granted, free of charge, to any person obtaining a
  7.  * copy of this software and associated documentation files (the
  8.  * "Software"), to deal in the Software without restriction, including
  9.  * without limitation the rights to use, copy, modify, merge, publish,
  10.  * distribute, sub license, and/or sell copies of the Software, and to
  11.  * permit persons to whom the Software is furnished to do so, subject to
  12.  * the following conditions:
  13.  *
  14.  * The above copyright notice and this permission notice (including the
  15.  * next paragraph) shall be included in all copies or substantial portions
  16.  * of the Software.
  17.  *
  18.  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
  19.  * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
  20.  * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
  21.  * IN NO EVENT SHALL VMWARE AND/OR ITS SUPPLIERS BE LIABLE FOR
  22.  * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
  23.  * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
  24.  * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
  25.  *
  26.  **************************************************************************/
  27.  
  28. /**
  29.  * Tiling engine.
  30.  *
  31.  * Builds per-tile display lists and executes them on calls to
  32.  * lp_setup_flush().
  33.  */
  34.  
  35. #include <limits.h>
  36.  
  37. #include "pipe/p_defines.h"
  38. #include "util/u_framebuffer.h"
  39. #include "util/u_inlines.h"
  40. #include "util/u_memory.h"
  41. #include "util/u_pack_color.h"
  42. #include "draw/draw_pipe.h"
  43. #include "os/os_time.h"
  44. #include "lp_context.h"
  45. #include "lp_memory.h"
  46. #include "lp_scene.h"
  47. #include "lp_texture.h"
  48. #include "lp_debug.h"
  49. #include "lp_fence.h"
  50. #include "lp_query.h"
  51. #include "lp_rast.h"
  52. #include "lp_setup_context.h"
  53. #include "lp_screen.h"
  54. #include "lp_state.h"
  55. #include "state_tracker/sw_winsys.h"
  56.  
  57. #include "draw/draw_context.h"
  58. #include "draw/draw_vbuf.h"
  59.  
  60.  
  61. static boolean set_scene_state( struct lp_setup_context *, enum setup_state,
  62.                              const char *reason);
  63. static boolean try_update_scene_state( struct lp_setup_context *setup );
  64.  
  65.  
  66. static void
  67. lp_setup_get_empty_scene(struct lp_setup_context *setup)
  68. {
  69.    assert(setup->scene == NULL);
  70.  
  71.    setup->scene_idx++;
  72.    setup->scene_idx %= Elements(setup->scenes);
  73.  
  74.    setup->scene = setup->scenes[setup->scene_idx];
  75.  
  76.    if (setup->scene->fence) {
  77.       if (LP_DEBUG & DEBUG_SETUP)
  78.          debug_printf("%s: wait for scene %d\n",
  79.                       __FUNCTION__, setup->scene->fence->id);
  80.  
  81.       lp_fence_wait(setup->scene->fence);
  82.    }
  83.  
  84.    lp_scene_begin_binning(setup->scene, &setup->fb, setup->rasterizer_discard);
  85.  
  86. }
  87.  
  88.  
  89. static void
  90. first_triangle( struct lp_setup_context *setup,
  91.                 const float (*v0)[4],
  92.                 const float (*v1)[4],
  93.                 const float (*v2)[4])
  94. {
  95.    assert(setup->state == SETUP_ACTIVE);
  96.    lp_setup_choose_triangle( setup );
  97.    setup->triangle( setup, v0, v1, v2 );
  98. }
  99.  
  100. static void
  101. first_line( struct lp_setup_context *setup,
  102.             const float (*v0)[4],
  103.             const float (*v1)[4])
  104. {
  105.    assert(setup->state == SETUP_ACTIVE);
  106.    lp_setup_choose_line( setup );
  107.    setup->line( setup, v0, v1 );
  108. }
  109.  
  110. static void
  111. first_point( struct lp_setup_context *setup,
  112.              const float (*v0)[4])
  113. {
  114.    assert(setup->state == SETUP_ACTIVE);
  115.    lp_setup_choose_point( setup );
  116.    setup->point( setup, v0 );
  117. }
  118.  
  119. void lp_setup_reset( struct lp_setup_context *setup )
  120. {
  121.    unsigned i;
  122.  
  123.    LP_DBG(DEBUG_SETUP, "%s\n", __FUNCTION__);
  124.  
  125.    /* Reset derived state */
  126.    for (i = 0; i < Elements(setup->constants); ++i) {
  127.       setup->constants[i].stored_size = 0;
  128.       setup->constants[i].stored_data = NULL;
  129.    }
  130.    setup->fs.stored = NULL;
  131.    setup->dirty = ~0;
  132.  
  133.    /* no current bin */
  134.    setup->scene = NULL;
  135.  
  136.    /* Reset some state:
  137.     */
  138.    memset(&setup->clear, 0, sizeof setup->clear);
  139.  
  140.    /* Have an explicit "start-binning" call and get rid of this
  141.     * pointer twiddling?
  142.     */
  143.    setup->line = first_line;
  144.    setup->point = first_point;
  145.    setup->triangle = first_triangle;
  146. }
  147.  
  148.  
  149. /** Rasterize all scene's bins */
  150. static void
  151. lp_setup_rasterize_scene( struct lp_setup_context *setup )
  152. {
  153.    struct lp_scene *scene = setup->scene;
  154.    struct llvmpipe_screen *screen = llvmpipe_screen(scene->pipe->screen);
  155.  
  156.    scene->num_active_queries = setup->active_binned_queries;
  157.    memcpy(scene->active_queries, setup->active_queries,
  158.           scene->num_active_queries * sizeof(scene->active_queries[0]));
  159.  
  160.    lp_scene_end_binning(scene);
  161.  
  162.    lp_fence_reference(&setup->last_fence, scene->fence);
  163.  
  164.    if (setup->last_fence)
  165.       setup->last_fence->issued = TRUE;
  166.  
  167.    pipe_mutex_lock(screen->rast_mutex);
  168.  
  169.    /* FIXME: We enqueue the scene then wait on the rasterizer to finish.
  170.     * This means we never actually run any vertex stuff in parallel to
  171.     * rasterization (not in the same context at least) which is what the
  172.     * multiple scenes per setup is about - when we get a new empty scene
  173.     * any old one is already empty again because we waited here for
  174.     * raster tasks to be finished. Ideally, we shouldn't need to wait here
  175.     * and rely on fences elsewhere when waiting is necessary.
  176.     * Certainly, lp_scene_end_rasterization() would need to be deferred too
  177.     * and there are probably other reasons why this doesn't actually work.
  178.     */
  179.    lp_rast_queue_scene(screen->rast, scene);
  180.    lp_rast_finish(screen->rast);
  181.    pipe_mutex_unlock(screen->rast_mutex);
  182.  
  183.    lp_scene_end_rasterization(setup->scene);
  184.    lp_setup_reset( setup );
  185.  
  186.    LP_DBG(DEBUG_SETUP, "%s done \n", __FUNCTION__);
  187. }
  188.  
  189.  
  190.  
  191. static boolean
  192. begin_binning( struct lp_setup_context *setup )
  193. {
  194.    struct lp_scene *scene = setup->scene;
  195.    boolean need_zsload = FALSE;
  196.    boolean ok;
  197.  
  198.    assert(scene);
  199.    assert(scene->fence == NULL);
  200.  
  201.    /* Always create a fence:
  202.     */
  203.    scene->fence = lp_fence_create(MAX2(1, setup->num_threads));
  204.    if (!scene->fence)
  205.       return FALSE;
  206.  
  207.    ok = try_update_scene_state(setup);
  208.    if (!ok)
  209.       return FALSE;
  210.  
  211.    if (setup->fb.zsbuf &&
  212.        ((setup->clear.flags & PIPE_CLEAR_DEPTHSTENCIL) != PIPE_CLEAR_DEPTHSTENCIL) &&
  213.         util_format_is_depth_and_stencil(setup->fb.zsbuf->format))
  214.       need_zsload = TRUE;
  215.  
  216.    LP_DBG(DEBUG_SETUP, "%s color clear bufs: %x depth: %s\n", __FUNCTION__,
  217.           setup->clear.flags >> 2,
  218.           need_zsload ? "clear": "load");
  219.  
  220.    if (setup->clear.flags & PIPE_CLEAR_COLOR) {
  221.       unsigned cbuf;
  222.       for (cbuf = 0; cbuf < setup->fb.nr_cbufs; cbuf++) {
  223.          assert(PIPE_CLEAR_COLOR0 == 1 << 2);
  224.          if (setup->clear.flags & (1 << (2 + cbuf))) {
  225.             union lp_rast_cmd_arg clearrb_arg;
  226.             struct lp_rast_clear_rb *cc_scene =
  227.                (struct lp_rast_clear_rb *)
  228.                   lp_scene_alloc(scene, sizeof(struct lp_rast_clear_rb));
  229.  
  230.             if (!cc_scene) {
  231.                return FALSE;
  232.             }
  233.  
  234.             cc_scene->cbuf = cbuf;
  235.             cc_scene->color_val = setup->clear.color_val[cbuf];
  236.             clearrb_arg.clear_rb = cc_scene;
  237.  
  238.             if (!lp_scene_bin_everywhere(scene,
  239.                                          LP_RAST_OP_CLEAR_COLOR,
  240.                                          clearrb_arg))
  241.                return FALSE;
  242.          }
  243.       }
  244.    }
  245.  
  246.    if (setup->fb.zsbuf) {
  247.       if (setup->clear.flags & PIPE_CLEAR_DEPTHSTENCIL) {
  248.          ok = lp_scene_bin_everywhere( scene,
  249.                                        LP_RAST_OP_CLEAR_ZSTENCIL,
  250.                                        lp_rast_arg_clearzs(
  251.                                           setup->clear.zsvalue,
  252.                                           setup->clear.zsmask));
  253.          if (!ok)
  254.             return FALSE;
  255.       }
  256.    }
  257.  
  258.    setup->clear.flags = 0;
  259.    setup->clear.zsmask = 0;
  260.    setup->clear.zsvalue = 0;
  261.  
  262.    scene->had_queries = !!setup->active_binned_queries;
  263.  
  264.    LP_DBG(DEBUG_SETUP, "%s done\n", __FUNCTION__);
  265.    return TRUE;
  266. }
  267.  
  268.  
  269. /* This basically bins and then flushes any outstanding full-screen
  270.  * clears.  
  271.  *
  272.  * TODO: fast path for fullscreen clears and no triangles.
  273.  */
  274. static boolean
  275. execute_clears( struct lp_setup_context *setup )
  276. {
  277.    LP_DBG(DEBUG_SETUP, "%s\n", __FUNCTION__);
  278.  
  279.    return begin_binning( setup );
  280. }
  281.  
  282. const char *states[] = {
  283.    "FLUSHED",
  284.    "CLEARED",
  285.    "ACTIVE "
  286. };
  287.  
  288.  
  289. static boolean
  290. set_scene_state( struct lp_setup_context *setup,
  291.                  enum setup_state new_state,
  292.                  const char *reason)
  293. {
  294.    unsigned old_state = setup->state;
  295.  
  296.    if (old_state == new_state)
  297.       return TRUE;
  298.    
  299.    if (LP_DEBUG & DEBUG_SCENE) {
  300.       debug_printf("%s old %s new %s%s%s\n",
  301.                    __FUNCTION__,
  302.                    states[old_state],
  303.                    states[new_state],
  304.                    (new_state == SETUP_FLUSHED) ? ": " : "",
  305.                    (new_state == SETUP_FLUSHED) ? reason : "");
  306.  
  307.       if (new_state == SETUP_FLUSHED && setup->scene)
  308.          lp_debug_draw_bins_by_cmd_length(setup->scene);
  309.    }
  310.  
  311.    /* wait for a free/empty scene
  312.     */
  313.    if (old_state == SETUP_FLUSHED)
  314.       lp_setup_get_empty_scene(setup);
  315.  
  316.    switch (new_state) {
  317.    case SETUP_CLEARED:
  318.       break;
  319.  
  320.    case SETUP_ACTIVE:
  321.       if (!begin_binning( setup ))
  322.          goto fail;
  323.       break;
  324.  
  325.    case SETUP_FLUSHED:
  326.       if (old_state == SETUP_CLEARED)
  327.          if (!execute_clears( setup ))
  328.             goto fail;
  329.  
  330.       lp_setup_rasterize_scene( setup );
  331.       assert(setup->scene == NULL);
  332.       break;
  333.  
  334.    default:
  335.       assert(0 && "invalid setup state mode");
  336.       goto fail;
  337.    }
  338.  
  339.    setup->state = new_state;
  340.    return TRUE;
  341.  
  342. fail:
  343.    if (setup->scene) {
  344.       lp_scene_end_rasterization(setup->scene);
  345.       setup->scene = NULL;
  346.    }
  347.  
  348.    setup->state = SETUP_FLUSHED;
  349.    lp_setup_reset( setup );
  350.    return FALSE;
  351. }
  352.  
  353.  
  354. void
  355. lp_setup_flush( struct lp_setup_context *setup,
  356.                 struct pipe_fence_handle **fence,
  357.                 const char *reason)
  358. {
  359.    set_scene_state( setup, SETUP_FLUSHED, reason );
  360.  
  361.    if (fence) {
  362.       lp_fence_reference((struct lp_fence **)fence, setup->last_fence);
  363.    }
  364. }
  365.  
  366.  
  367. void
  368. lp_setup_bind_framebuffer( struct lp_setup_context *setup,
  369.                            const struct pipe_framebuffer_state *fb )
  370. {
  371.    LP_DBG(DEBUG_SETUP, "%s\n", __FUNCTION__);
  372.  
  373.    /* Flush any old scene.
  374.     */
  375.    set_scene_state( setup, SETUP_FLUSHED, __FUNCTION__ );
  376.  
  377.    /*
  378.     * Ensure the old scene is not reused.
  379.     */
  380.    assert(!setup->scene);
  381.  
  382.    /* Set new state.  This will be picked up later when we next need a
  383.     * scene.
  384.     */
  385.    util_copy_framebuffer_state(&setup->fb, fb);
  386.    setup->framebuffer.x0 = 0;
  387.    setup->framebuffer.y0 = 0;
  388.    setup->framebuffer.x1 = fb->width-1;
  389.    setup->framebuffer.y1 = fb->height-1;
  390.    setup->dirty |= LP_SETUP_NEW_SCISSOR;
  391. }
  392.  
  393.  
  394. /*
  395.  * Try to clear one color buffer of the attached fb, either by binning a clear
  396.  * command or queuing up the clear for later (when binning is started).
  397.  */
  398. static boolean
  399. lp_setup_try_clear_color_buffer(struct lp_setup_context *setup,
  400.                                 const union pipe_color_union *color,
  401.                                 unsigned cbuf)
  402. {
  403.    union lp_rast_cmd_arg clearrb_arg;
  404.    union util_color uc;
  405.    enum pipe_format format = setup->fb.cbufs[cbuf]->format;
  406.  
  407.    LP_DBG(DEBUG_SETUP, "%s state %d\n", __FUNCTION__, setup->state);
  408.  
  409.    if (util_format_is_pure_integer(format)) {
  410.       /*
  411.        * We expect int/uint clear values here, though some APIs
  412.        * might disagree (but in any case util_pack_color()
  413.        * couldn't handle it)...
  414.        */
  415.       if (util_format_is_pure_sint(format)) {
  416.          util_format_write_4i(format, color->i, 0, &uc, 0, 0, 0, 1, 1);
  417.       }
  418.       else {
  419.          assert(util_format_is_pure_uint(format));
  420.          util_format_write_4ui(format, color->ui, 0, &uc, 0, 0, 0, 1, 1);
  421.       }
  422.    }
  423.    else {
  424.       util_pack_color(color->f, format, &uc);
  425.    }
  426.  
  427.    if (setup->state == SETUP_ACTIVE) {
  428.       struct lp_scene *scene = setup->scene;
  429.  
  430.       /* Add the clear to existing scene.  In the unusual case where
  431.        * both color and depth-stencil are being cleared when there's
  432.        * already been some rendering, we could discard the currently
  433.        * binned scene and start again, but I don't see that as being
  434.        * a common usage.
  435.        */
  436.       struct lp_rast_clear_rb *cc_scene =
  437.          (struct lp_rast_clear_rb *)
  438.             lp_scene_alloc_aligned(scene, sizeof(struct lp_rast_clear_rb), 8);
  439.  
  440.       if (!cc_scene) {
  441.          return FALSE;
  442.       }
  443.  
  444.       cc_scene->cbuf = cbuf;
  445.       cc_scene->color_val = uc;
  446.       clearrb_arg.clear_rb = cc_scene;
  447.  
  448.       if (!lp_scene_bin_everywhere(scene,
  449.                                    LP_RAST_OP_CLEAR_COLOR,
  450.                                    clearrb_arg))
  451.          return FALSE;
  452.    }
  453.    else {
  454.       /* Put ourselves into the 'pre-clear' state, specifically to try
  455.        * and accumulate multiple clears to color and depth_stencil
  456.        * buffers which the app or state-tracker might issue
  457.        * separately.
  458.        */
  459.       set_scene_state( setup, SETUP_CLEARED, __FUNCTION__ );
  460.  
  461.       assert(PIPE_CLEAR_COLOR0 == (1 << 2));
  462.       setup->clear.flags |= 1 << (cbuf + 2);
  463.       setup->clear.color_val[cbuf] = uc;
  464.    }
  465.  
  466.    return TRUE;
  467. }
  468.  
  469. static boolean
  470. lp_setup_try_clear_zs(struct lp_setup_context *setup,
  471.                       double depth,
  472.                       unsigned stencil,
  473.                       unsigned flags)
  474. {
  475.    uint64_t zsmask = 0;
  476.    uint64_t zsvalue = 0;
  477.    uint32_t zmask32;
  478.    uint8_t smask8;
  479.  
  480.    LP_DBG(DEBUG_SETUP, "%s state %d\n", __FUNCTION__, setup->state);
  481.  
  482.    zmask32 = (flags & PIPE_CLEAR_DEPTH) ? ~0 : 0;
  483.    smask8 = (flags & PIPE_CLEAR_STENCIL) ? ~0 : 0;
  484.  
  485.    zsvalue = util_pack64_z_stencil(setup->fb.zsbuf->format,
  486.                                    depth,
  487.                                    stencil);
  488.  
  489.    zsmask = util_pack64_mask_z_stencil(setup->fb.zsbuf->format,
  490.                                        zmask32,
  491.                                        smask8);
  492.  
  493.    zsvalue &= zsmask;
  494.  
  495.    if (setup->state == SETUP_ACTIVE) {
  496.       struct lp_scene *scene = setup->scene;
  497.  
  498.       /* Add the clear to existing scene.  In the unusual case where
  499.        * both color and depth-stencil are being cleared when there's
  500.        * already been some rendering, we could discard the currently
  501.        * binned scene and start again, but I don't see that as being
  502.        * a common usage.
  503.        */
  504.       if (!lp_scene_bin_everywhere(scene,
  505.                                    LP_RAST_OP_CLEAR_ZSTENCIL,
  506.                                    lp_rast_arg_clearzs(zsvalue, zsmask)))
  507.          return FALSE;
  508.    }
  509.    else {
  510.       /* Put ourselves into the 'pre-clear' state, specifically to try
  511.        * and accumulate multiple clears to color and depth_stencil
  512.        * buffers which the app or state-tracker might issue
  513.        * separately.
  514.        */
  515.       set_scene_state( setup, SETUP_CLEARED, __FUNCTION__ );
  516.  
  517.       setup->clear.flags |= flags;
  518.  
  519.       setup->clear.zsmask |= zsmask;
  520.       setup->clear.zsvalue =
  521.          (setup->clear.zsvalue & ~zsmask) | (zsvalue & zsmask);
  522.    }
  523.  
  524.    return TRUE;
  525. }
  526.  
  527. void
  528. lp_setup_clear( struct lp_setup_context *setup,
  529.                 const union pipe_color_union *color,
  530.                 double depth,
  531.                 unsigned stencil,
  532.                 unsigned flags )
  533. {
  534.    unsigned i;
  535.  
  536.    /*
  537.     * Note that any of these (at most 9) clears could fail, though at most
  538.     * one of them should actually fail. Handling each buffer separately
  539.     * avoids re-doing clears that already succeeded (we may still clear
  540.     * tiles twice if a clear succeeded only partially for one buffer).
  541.     */
  542.    if (flags & PIPE_CLEAR_DEPTHSTENCIL) {
  543.       unsigned flagszs = flags & PIPE_CLEAR_DEPTHSTENCIL;
  544.       if (!lp_setup_try_clear_zs(setup, depth, stencil, flagszs)) {
  545.          lp_setup_flush(setup, NULL, __FUNCTION__);
  546.  
  547.          if (!lp_setup_try_clear_zs(setup, depth, stencil, flagszs))
  548.             assert(0);
  549.       }
  550.    }
  551.  
  552.    if (flags & PIPE_CLEAR_COLOR) {
  553.       assert(PIPE_CLEAR_COLOR0 == (1 << 2));
  554.       for (i = 0; i < setup->fb.nr_cbufs; i++) {
  555.          if ((flags & (1 << (2 + i))) && setup->fb.cbufs[i]) {
  556.             if (!lp_setup_try_clear_color_buffer(setup, color, i)) {
  557.                lp_setup_flush(setup, NULL, __FUNCTION__);
  558.  
  559.                if (!lp_setup_try_clear_color_buffer(setup, color, i))
  560.                   assert(0);
  561.             }
  562.          }
  563.       }
  564.    }
  565. }
  566.  
  567.  
  568.  
  569. void
  570. lp_setup_set_triangle_state( struct lp_setup_context *setup,
  571.                              unsigned cull_mode,
  572.                              boolean ccw_is_frontface,
  573.                              boolean scissor,
  574.                              boolean half_pixel_center,
  575.                              boolean bottom_edge_rule)
  576. {
  577.    LP_DBG(DEBUG_SETUP, "%s\n", __FUNCTION__);
  578.  
  579.    setup->ccw_is_frontface = ccw_is_frontface;
  580.    setup->cullmode = cull_mode;
  581.    setup->triangle = first_triangle;
  582.    setup->pixel_offset = half_pixel_center ? 0.5f : 0.0f;
  583.    setup->bottom_edge_rule = bottom_edge_rule;
  584.  
  585.    if (setup->scissor_test != scissor) {
  586.       setup->dirty |= LP_SETUP_NEW_SCISSOR;
  587.       setup->scissor_test = scissor;
  588.    }
  589. }
  590.  
  591. void
  592. lp_setup_set_line_state( struct lp_setup_context *setup,
  593.                          float line_width)
  594. {
  595.    LP_DBG(DEBUG_SETUP, "%s\n", __FUNCTION__);
  596.  
  597.    setup->line_width = line_width;
  598. }
  599.  
  600. void
  601. lp_setup_set_point_state( struct lp_setup_context *setup,
  602.                           float point_size,
  603.                           boolean point_size_per_vertex,
  604.                           uint sprite_coord_enable,
  605.                           uint sprite_coord_origin)
  606. {
  607.    LP_DBG(DEBUG_SETUP, "%s\n", __FUNCTION__);
  608.  
  609.    setup->point_size = point_size;
  610.    setup->sprite_coord_enable = sprite_coord_enable;
  611.    setup->sprite_coord_origin = sprite_coord_origin;
  612.    setup->point_size_per_vertex = point_size_per_vertex;
  613. }
  614.  
  615. void
  616. lp_setup_set_setup_variant( struct lp_setup_context *setup,
  617.                             const struct lp_setup_variant *variant)
  618. {
  619.    LP_DBG(DEBUG_SETUP, "%s\n", __FUNCTION__);
  620.    
  621.    setup->setup.variant = variant;
  622. }
  623.  
  624. void
  625. lp_setup_set_fs_variant( struct lp_setup_context *setup,
  626.                          struct lp_fragment_shader_variant *variant)
  627. {
  628.    LP_DBG(DEBUG_SETUP, "%s %p\n", __FUNCTION__,
  629.           variant);
  630.    /* FIXME: reference count */
  631.  
  632.    setup->fs.current.variant = variant;
  633.    setup->dirty |= LP_SETUP_NEW_FS;
  634. }
  635.  
  636. void
  637. lp_setup_set_fs_constants(struct lp_setup_context *setup,
  638.                           unsigned num,
  639.                           struct pipe_constant_buffer *buffers)
  640. {
  641.    unsigned i;
  642.  
  643.    LP_DBG(DEBUG_SETUP, "%s %p\n", __FUNCTION__, (void *) buffers);
  644.  
  645.    assert(num <= Elements(setup->constants));
  646.  
  647.    for (i = 0; i < num; ++i) {
  648.       util_copy_constant_buffer(&setup->constants[i].current, &buffers[i]);
  649.    }
  650.    for (; i < Elements(setup->constants); i++) {
  651.       util_copy_constant_buffer(&setup->constants[i].current, NULL);
  652.    }
  653.    setup->dirty |= LP_SETUP_NEW_CONSTANTS;
  654. }
  655.  
  656.  
  657. void
  658. lp_setup_set_alpha_ref_value( struct lp_setup_context *setup,
  659.                               float alpha_ref_value )
  660. {
  661.    LP_DBG(DEBUG_SETUP, "%s %f\n", __FUNCTION__, alpha_ref_value);
  662.  
  663.    if(setup->fs.current.jit_context.alpha_ref_value != alpha_ref_value) {
  664.       setup->fs.current.jit_context.alpha_ref_value = alpha_ref_value;
  665.       setup->dirty |= LP_SETUP_NEW_FS;
  666.    }
  667. }
  668.  
  669. void
  670. lp_setup_set_stencil_ref_values( struct lp_setup_context *setup,
  671.                                  const ubyte refs[2] )
  672. {
  673.    LP_DBG(DEBUG_SETUP, "%s %d %d\n", __FUNCTION__, refs[0], refs[1]);
  674.  
  675.    if (setup->fs.current.jit_context.stencil_ref_front != refs[0] ||
  676.        setup->fs.current.jit_context.stencil_ref_back != refs[1]) {
  677.       setup->fs.current.jit_context.stencil_ref_front = refs[0];
  678.       setup->fs.current.jit_context.stencil_ref_back = refs[1];
  679.       setup->dirty |= LP_SETUP_NEW_FS;
  680.    }
  681. }
  682.  
  683. void
  684. lp_setup_set_blend_color( struct lp_setup_context *setup,
  685.                           const struct pipe_blend_color *blend_color )
  686. {
  687.    LP_DBG(DEBUG_SETUP, "%s\n", __FUNCTION__);
  688.  
  689.    assert(blend_color);
  690.  
  691.    if(memcmp(&setup->blend_color.current, blend_color, sizeof *blend_color) != 0) {
  692.       memcpy(&setup->blend_color.current, blend_color, sizeof *blend_color);
  693.       setup->dirty |= LP_SETUP_NEW_BLEND_COLOR;
  694.    }
  695. }
  696.  
  697.  
  698. void
  699. lp_setup_set_scissors( struct lp_setup_context *setup,
  700.                        const struct pipe_scissor_state *scissors )
  701. {
  702.    unsigned i;
  703.    LP_DBG(DEBUG_SETUP, "%s\n", __FUNCTION__);
  704.  
  705.    assert(scissors);
  706.  
  707.    for (i = 0; i < PIPE_MAX_VIEWPORTS; ++i) {
  708.       setup->scissors[i].x0 = scissors[i].minx;
  709.       setup->scissors[i].x1 = scissors[i].maxx-1;
  710.       setup->scissors[i].y0 = scissors[i].miny;
  711.       setup->scissors[i].y1 = scissors[i].maxy-1;
  712.    }
  713.    setup->dirty |= LP_SETUP_NEW_SCISSOR;
  714. }
  715.  
  716.  
  717. void
  718. lp_setup_set_flatshade_first( struct lp_setup_context *setup,
  719.                               boolean flatshade_first )
  720. {
  721.    setup->flatshade_first = flatshade_first;
  722. }
  723.  
  724. void
  725. lp_setup_set_rasterizer_discard( struct lp_setup_context *setup,
  726.                                  boolean rasterizer_discard )
  727. {
  728.    if (setup->rasterizer_discard != rasterizer_discard) {
  729.       setup->rasterizer_discard = rasterizer_discard;
  730.       set_scene_state( setup, SETUP_FLUSHED, __FUNCTION__ );
  731.    }
  732. }
  733.  
  734. void
  735. lp_setup_set_vertex_info( struct lp_setup_context *setup,
  736.                           struct vertex_info *vertex_info )
  737. {
  738.    /* XXX: just silently holding onto the pointer:
  739.     */
  740.    setup->vertex_info = vertex_info;
  741. }
  742.  
  743.  
  744. /**
  745.  * Called during state validation when LP_NEW_VIEWPORT is set.
  746.  */
  747. void
  748. lp_setup_set_viewports(struct lp_setup_context *setup,
  749.                        unsigned num_viewports,
  750.                        const struct pipe_viewport_state *viewports)
  751. {
  752.    struct llvmpipe_context *lp = llvmpipe_context(setup->pipe);
  753.    unsigned i;
  754.  
  755.    LP_DBG(DEBUG_SETUP, "%s\n", __FUNCTION__);
  756.  
  757.    assert(num_viewports <= PIPE_MAX_VIEWPORTS);
  758.    assert(viewports);
  759.  
  760.    /*
  761.     * For use in lp_state_fs.c, propagate the viewport values for all viewports.
  762.     */
  763.    for (i = 0; i < num_viewports; i++) {
  764.       float min_depth;
  765.       float max_depth;
  766.  
  767.       if (lp->rasterizer->clip_halfz == 0) {
  768.          float half_depth = viewports[i].scale[2];
  769.          min_depth = viewports[i].translate[2] - half_depth;
  770.          max_depth = min_depth + half_depth * 2.0f;
  771.       } else {
  772.          min_depth = viewports[i].translate[2];
  773.          max_depth = min_depth + viewports[i].scale[2];
  774.       }
  775.  
  776.       if (setup->viewports[i].min_depth != min_depth ||
  777.           setup->viewports[i].max_depth != max_depth) {
  778.           setup->viewports[i].min_depth = min_depth;
  779.           setup->viewports[i].max_depth = max_depth;
  780.           setup->dirty |= LP_SETUP_NEW_VIEWPORTS;
  781.       }
  782.    }
  783. }
  784.  
  785.  
  786. /**
  787.  * Called during state validation when LP_NEW_SAMPLER_VIEW is set.
  788.  */
  789. void
  790. lp_setup_set_fragment_sampler_views(struct lp_setup_context *setup,
  791.                                     unsigned num,
  792.                                     struct pipe_sampler_view **views)
  793. {
  794.    unsigned i;
  795.  
  796.    LP_DBG(DEBUG_SETUP, "%s\n", __FUNCTION__);
  797.  
  798.    assert(num <= PIPE_MAX_SHADER_SAMPLER_VIEWS);
  799.  
  800.    for (i = 0; i < PIPE_MAX_SHADER_SAMPLER_VIEWS; i++) {
  801.       struct pipe_sampler_view *view = i < num ? views[i] : NULL;
  802.  
  803.       if (view) {
  804.          struct pipe_resource *res = view->texture;
  805.          struct llvmpipe_resource *lp_tex = llvmpipe_resource(res);
  806.          struct lp_jit_texture *jit_tex;
  807.          jit_tex = &setup->fs.current.jit_context.textures[i];
  808.  
  809.          /* We're referencing the texture's internal data, so save a
  810.           * reference to it.
  811.           */
  812.          pipe_resource_reference(&setup->fs.current_tex[i], res);
  813.  
  814.          if (!lp_tex->dt) {
  815.             /* regular texture - setup array of mipmap level offsets */
  816.             int j;
  817.             unsigned first_level = 0;
  818.             unsigned last_level = 0;
  819.  
  820.             if (llvmpipe_resource_is_texture(res)) {
  821.                first_level = view->u.tex.first_level;
  822.                last_level = view->u.tex.last_level;
  823.                assert(first_level <= last_level);
  824.                assert(last_level <= res->last_level);
  825.                jit_tex->base = lp_tex->tex_data;
  826.             }
  827.             else {
  828.               jit_tex->base = lp_tex->data;
  829.             }
  830.  
  831.             if (LP_PERF & PERF_TEX_MEM) {
  832.                /* use dummy tile memory */
  833.                jit_tex->base = lp_dummy_tile;
  834.                jit_tex->width = TILE_SIZE/8;
  835.                jit_tex->height = TILE_SIZE/8;
  836.                jit_tex->depth = 1;
  837.                jit_tex->first_level = 0;
  838.                jit_tex->last_level = 0;
  839.                jit_tex->mip_offsets[0] = 0;
  840.                jit_tex->row_stride[0] = 0;
  841.                jit_tex->img_stride[0] = 0;
  842.             }
  843.             else {
  844.                jit_tex->width = res->width0;
  845.                jit_tex->height = res->height0;
  846.                jit_tex->depth = res->depth0;
  847.                jit_tex->first_level = first_level;
  848.                jit_tex->last_level = last_level;
  849.  
  850.                if (llvmpipe_resource_is_texture(res)) {
  851.                   for (j = first_level; j <= last_level; j++) {
  852.                      jit_tex->mip_offsets[j] = lp_tex->mip_offsets[j];
  853.                      jit_tex->row_stride[j] = lp_tex->row_stride[j];
  854.                      jit_tex->img_stride[j] = lp_tex->img_stride[j];
  855.                   }
  856.  
  857.                   if (view->target == PIPE_TEXTURE_1D_ARRAY ||
  858.                       view->target == PIPE_TEXTURE_2D_ARRAY ||
  859.                       view->target == PIPE_TEXTURE_CUBE ||
  860.                       view->target == PIPE_TEXTURE_CUBE_ARRAY) {
  861.                      /*
  862.                      * For array textures, we don't have first_layer; instead we
  863.                      * adjust last_layer (stored as depth) plus the mip level offsets
  864.                      * (since the layout is mip-first we can't just adjust the base ptr).
  865.                       * XXX For mip levels, could do something similar.
  866.                       */
  867.                      jit_tex->depth = view->u.tex.last_layer - view->u.tex.first_layer + 1;
  868.                      for (j = first_level; j <= last_level; j++) {
  869.                         jit_tex->mip_offsets[j] += view->u.tex.first_layer *
  870.                                                    lp_tex->img_stride[j];
  871.                      }
  872.                      if (view->target == PIPE_TEXTURE_CUBE ||
  873.                          view->target == PIPE_TEXTURE_CUBE_ARRAY) {
  874.                         assert(jit_tex->depth % 6 == 0);
  875.                      }
  876.                      assert(view->u.tex.first_layer <= view->u.tex.last_layer);
  877.                      assert(view->u.tex.last_layer < res->array_size);
  878.                   }
  879.                }
  880.                else {
  881.                   /*
  882.                    * For buffers, we don't have first_element, instead adjust
  883.                    * last_element (stored as width) plus the base pointer.
  884.                    */
  885.                   unsigned view_blocksize = util_format_get_blocksize(view->format);
  886.                   /* probably don't really need to fill that out */
  887.                   jit_tex->mip_offsets[0] = 0;
  888.                   jit_tex->row_stride[0] = 0;
  889.                   jit_tex->img_stride[0] = 0;
  890.  
  891.                   /* everything specified in number of elements here. */
  892.                   jit_tex->width = view->u.buf.last_element - view->u.buf.first_element + 1;
  893.                   jit_tex->base = (uint8_t *)jit_tex->base + view->u.buf.first_element *
  894.                                   view_blocksize;
  895.                   /* XXX Unsure if we need to sanitize parameters? */
  896.                   assert(view->u.buf.first_element <= view->u.buf.last_element);
  897.                   assert(view->u.buf.last_element * view_blocksize < res->width0);
  898.                }
  899.             }
  900.          }
  901.          else {
  902.             /* display target texture/surface */
  903.             /*
  904.              * XXX: Where should this be unmapped?
  905.              */
  906.             struct llvmpipe_screen *screen = llvmpipe_screen(res->screen);
  907.             struct sw_winsys *winsys = screen->winsys;
  908.             jit_tex->base = winsys->displaytarget_map(winsys, lp_tex->dt,
  909.                                                          PIPE_TRANSFER_READ);
  910.             jit_tex->row_stride[0] = lp_tex->row_stride[0];
  911.             jit_tex->img_stride[0] = lp_tex->img_stride[0];
  912.             jit_tex->mip_offsets[0] = 0;
  913.             jit_tex->width = res->width0;
  914.             jit_tex->height = res->height0;
  915.             jit_tex->depth = res->depth0;
  916.             jit_tex->first_level = jit_tex->last_level = 0;
  917.             assert(jit_tex->base);
  918.          }
  919.       }
  920.    }
  921.  
  922.    setup->dirty |= LP_SETUP_NEW_FS;
  923. }
  924.  
  925.  
  926. /**
  927.  * Called during state validation when LP_NEW_SAMPLER is set.
  928.  */
  929. void
  930. lp_setup_set_fragment_sampler_state(struct lp_setup_context *setup,
  931.                                     unsigned num,
  932.                                     struct pipe_sampler_state **samplers)
  933. {
  934.    unsigned i;
  935.  
  936.    LP_DBG(DEBUG_SETUP, "%s\n", __FUNCTION__);
  937.  
  938.    assert(num <= PIPE_MAX_SAMPLERS);
  939.  
  940.    for (i = 0; i < PIPE_MAX_SAMPLERS; i++) {
  941.       const struct pipe_sampler_state *sampler = i < num ? samplers[i] : NULL;
  942.  
  943.       if (sampler) {
  944.          struct lp_jit_sampler *jit_sam;
  945.          jit_sam = &setup->fs.current.jit_context.samplers[i];
  946.  
  947.          jit_sam->min_lod = sampler->min_lod;
  948.          jit_sam->max_lod = sampler->max_lod;
  949.          jit_sam->lod_bias = sampler->lod_bias;
  950.          COPY_4V(jit_sam->border_color, sampler->border_color.f);
  951.       }
  952.    }
  953.  
  954.    setup->dirty |= LP_SETUP_NEW_FS;
  955. }
  956.  
  957.  
  958. /**
  959.  * Is the given texture referenced by any scene?
  960.  * Note: we have to check all scenes including any scenes currently
  961.  * being rendered and the current scene being built.
  962.  */
  963. unsigned
  964. lp_setup_is_resource_referenced( const struct lp_setup_context *setup,
  965.                                 const struct pipe_resource *texture )
  966. {
  967.    unsigned i;
  968.  
  969.    /* check the render targets */
  970.    for (i = 0; i < setup->fb.nr_cbufs; i++) {
  971.       if (setup->fb.cbufs[i] && setup->fb.cbufs[i]->texture == texture)
  972.          return LP_REFERENCED_FOR_READ | LP_REFERENCED_FOR_WRITE;
  973.    }
  974.    if (setup->fb.zsbuf && setup->fb.zsbuf->texture == texture) {
  975.       return LP_REFERENCED_FOR_READ | LP_REFERENCED_FOR_WRITE;
  976.    }
  977.  
  978.    /* check textures referenced by the scene */
  979.    for (i = 0; i < Elements(setup->scenes); i++) {
  980.       if (lp_scene_is_resource_referenced(setup->scenes[i], texture)) {
  981.          return LP_REFERENCED_FOR_READ;
  982.       }
  983.    }
  984.  
  985.    return LP_UNREFERENCED;
  986. }
  987.  
  988.  
  989. /**
  990.  * Called by vbuf code when we're about to draw something.
  991.  *
  992.  * This function stores all dirty state in the current scene's display list
  993.  * memory, via lp_scene_alloc().  We cannot pass pointers to mutable state to
  994.  * the JIT functions, as the JIT functions will be called later on, most likely
  995.  * on a different thread.
  996.  *
  997.  * When processing dirty state it is imperative that we don't refer to any
  998.  * pointers previously allocated with lp_scene_alloc() in this function (or any
  999.  * function) as they may belong to a scene freed since then.
  1000.  */
  1001. static boolean
  1002. try_update_scene_state( struct lp_setup_context *setup )
  1003. {
  1004.    static const float fake_const_buf[4];
  1005.    boolean new_scene = (setup->fs.stored == NULL);
  1006.    struct lp_scene *scene = setup->scene;
  1007.    unsigned i;
  1008.  
  1009.    assert(scene);
  1010.  
  1011.    if (setup->dirty & LP_SETUP_NEW_VIEWPORTS) {
  1012.       /*
  1013.        * Record new depth range state for changes due to viewport updates.
  1014.        *
  1015.        * TODO: Collapse the existing viewport and depth range information
  1016.        *       into one structure, for access by JIT.
  1017.        */
  1018.       struct lp_jit_viewport *stored;
  1019.  
  1020.       stored = (struct lp_jit_viewport *)
  1021.          lp_scene_alloc(scene, sizeof setup->viewports);
  1022.  
  1023.       if (!stored) {
  1024.          assert(!new_scene);
  1025.          return FALSE;
  1026.       }
  1027.  
  1028.       memcpy(stored, setup->viewports, sizeof setup->viewports);
  1029.  
  1030.       setup->fs.current.jit_context.viewports = stored;
  1031.       setup->dirty |= LP_SETUP_NEW_FS;
  1032.    }
  1033.  
  1034.    if(setup->dirty & LP_SETUP_NEW_BLEND_COLOR) {
  1035.       uint8_t *stored;
  1036.       float* fstored;
  1037.       unsigned i, j;
  1038.       unsigned size;
  1039.  
  1040.       /* Alloc u8_blend_color (16 x i8) and f_blend_color (4 or 8 x f32) */
  1041.       size  = 4 * 16 * sizeof(uint8_t);
  1042.       size += (LP_MAX_VECTOR_LENGTH / 4) * sizeof(float);
  1043.       stored = lp_scene_alloc_aligned(scene, size, LP_MIN_VECTOR_ALIGN);
  1044.  
  1045.       if (!stored) {
  1046.          assert(!new_scene);
  1047.          return FALSE;
  1048.       }
  1049.  
  1050.       /* Store floating point colour */
  1051.       fstored = (float*)(stored + 4*16);
  1052.       for (i = 0; i < (LP_MAX_VECTOR_LENGTH / 4); ++i) {
  1053.          fstored[i] = setup->blend_color.current.color[i % 4];
  1054.       }
  1055.  
  1056.       /* smear each blend color component across 16 ubyte elements */
  1057.       for (i = 0; i < 4; ++i) {
  1058.          uint8_t c = float_to_ubyte(setup->blend_color.current.color[i]);
  1059.          for (j = 0; j < 16; ++j)
  1060.             stored[i*16 + j] = c;
  1061.       }
  1062.  
  1063.       setup->blend_color.stored = stored;
  1064.       setup->fs.current.jit_context.u8_blend_color = stored;
  1065.       setup->fs.current.jit_context.f_blend_color = fstored;
  1066.       setup->dirty |= LP_SETUP_NEW_FS;
  1067.    }
  1068.  
  1069.    if (setup->dirty & LP_SETUP_NEW_CONSTANTS) {
  1070.       for (i = 0; i < Elements(setup->constants); ++i) {
  1071.          struct pipe_resource *buffer = setup->constants[i].current.buffer;
  1072.          const unsigned current_size = setup->constants[i].current.buffer_size;
  1073.          const ubyte *current_data = NULL;
  1074.          int num_constants;
  1075.  
  1076.          if (buffer) {
  1077.             /* resource buffer */
  1078.             current_data = (ubyte *) llvmpipe_resource_data(buffer);
  1079.          }
  1080.          else if (setup->constants[i].current.user_buffer) {
  1081.             /* user-space buffer */
  1082.             current_data = (ubyte *) setup->constants[i].current.user_buffer;
  1083.          }
  1084.  
  1085.          if (current_data) {
  1086.             current_data += setup->constants[i].current.buffer_offset;
  1087.  
  1088.             /* TODO: copy only the actually used constants? */
  1089.  
  1090.             if (setup->constants[i].stored_size != current_size ||
  1091.                !setup->constants[i].stored_data ||
  1092.                memcmp(setup->constants[i].stored_data,
  1093.                       current_data,
  1094.                       current_size) != 0) {
  1095.                void *stored;
  1096.  
  1097.                stored = lp_scene_alloc(scene, current_size);
  1098.                if (!stored) {
  1099.                   assert(!new_scene);
  1100.                   return FALSE;
  1101.                }
  1102.  
  1103.                memcpy(stored,
  1104.                       current_data,
  1105.                       current_size);
  1106.                setup->constants[i].stored_size = current_size;
  1107.                setup->constants[i].stored_data = stored;
  1108.             }
  1109.             setup->fs.current.jit_context.constants[i] =
  1110.                setup->constants[i].stored_data;
  1111.          }
  1112.          else {
  1113.             setup->constants[i].stored_size = 0;
  1114.             setup->constants[i].stored_data = NULL;
  1115.             setup->fs.current.jit_context.constants[i] = fake_const_buf;
  1116.          }
  1117.  
  1118.          num_constants =
  1119.             setup->constants[i].stored_size / (sizeof(float) * 4);
  1120.          setup->fs.current.jit_context.num_constants[i] = num_constants;
  1121.          setup->dirty |= LP_SETUP_NEW_FS;
  1122.       }
  1123.    }
  1124.  
  1125.  
  1126.    if (setup->dirty & LP_SETUP_NEW_FS) {
  1127.       if (!setup->fs.stored ||
  1128.           memcmp(setup->fs.stored,
  1129.                  &setup->fs.current,
  1130.                  sizeof setup->fs.current) != 0)
  1131.       {
  1132.          struct lp_rast_state *stored;
  1133.          
  1134.          /* The fs state that's been stored in the scene is different from
  1135.           * the new, current state.  So allocate a new lp_rast_state object
  1136.           * and append it to the bin's setup data buffer.
  1137.           */
  1138.          stored = (struct lp_rast_state *) lp_scene_alloc(scene, sizeof *stored);
  1139.          if (!stored) {
  1140.             assert(!new_scene);
  1141.             return FALSE;
  1142.          }
  1143.  
  1144.          memcpy(stored,
  1145.                 &setup->fs.current,
  1146.                 sizeof setup->fs.current);
  1147.          setup->fs.stored = stored;
  1148.          
  1149.          /* The scene now references the textures in the rasterization
  1150.           * state record.  Record that fact now.
  1151.           */
  1152.          for (i = 0; i < Elements(setup->fs.current_tex); i++) {
  1153.             if (setup->fs.current_tex[i]) {
  1154.                if (!lp_scene_add_resource_reference(scene,
  1155.                                                     setup->fs.current_tex[i],
  1156.                                                     new_scene)) {
  1157.                   assert(!new_scene);
  1158.                   return FALSE;
  1159.                }
  1160.             }
  1161.          }
  1162.       }
  1163.    }
  1164.  
  1165.    if (setup->dirty & LP_SETUP_NEW_SCISSOR) {
  1166.       unsigned i;
  1167.       for (i = 0; i < PIPE_MAX_VIEWPORTS; ++i) {
  1168.          setup->draw_regions[i] = setup->framebuffer;
  1169.          if (setup->scissor_test) {
  1170.             u_rect_possible_intersection(&setup->scissors[i],
  1171.                                          &setup->draw_regions[i]);
  1172.          }
  1173.       }
  1174.    }
  1175.  
  1176.    setup->dirty = 0;
  1177.  
  1178.    assert(setup->fs.stored);
  1179.    return TRUE;
  1180. }
  1181.  
  1182. boolean
  1183. lp_setup_update_state( struct lp_setup_context *setup,
  1184.                        boolean update_scene )
  1185. {
  1186.    /* Some of the 'draw' pipeline stages may have changed some driver state.
  1187.     * Make sure we've processed those state changes before anything else.
  1188.     *
  1189.     * XXX this is the only place where llvmpipe_context is used in the
  1190.     * setup code.  This may get refactored/changed...
  1191.     */
  1192.    {
  1193.       struct llvmpipe_context *lp = llvmpipe_context(setup->pipe);
  1194.       if (lp->dirty) {
  1195.          llvmpipe_update_derived(lp);
  1196.       }
  1197.  
  1198.       if (lp->setup->dirty) {
  1199.          llvmpipe_update_setup(lp);
  1200.       }
  1201.  
  1202.       assert(setup->setup.variant);
  1203.  
  1204.       /* Will probably need to move this somewhere else, just need  
  1205.        * to know about vertex shader point size attribute.
  1206.        */
  1207.       setup->psize = lp->psize_slot;
  1208.       setup->viewport_index_slot = lp->viewport_index_slot;
  1209.       setup->layer_slot = lp->layer_slot;
  1210.       setup->face_slot = lp->face_slot;
  1211.  
  1212.       assert(lp->dirty == 0);
  1213.  
  1214.       assert(lp->setup_variant.key.size ==
  1215.              setup->setup.variant->key.size);
  1216.  
  1217.       assert(memcmp(&lp->setup_variant.key,
  1218.                     &setup->setup.variant->key,
  1219.                     setup->setup.variant->key.size) == 0);
  1220.    }
  1221.  
  1222.    if (update_scene && setup->state != SETUP_ACTIVE) {
  1223.       if (!set_scene_state( setup, SETUP_ACTIVE, __FUNCTION__ ))
  1224.          return FALSE;
  1225.    }
  1226.  
  1227.    /* Only call into update_scene_state() if we already have a
  1228.     * scene:
  1229.     */
  1230.    if (update_scene && setup->scene) {
  1231.       assert(setup->state == SETUP_ACTIVE);
  1232.  
  1233.       if (try_update_scene_state(setup))
  1234.          return TRUE;
  1235.  
  1236.       /* Update failed, try to restart the scene.
  1237.        *
  1238.        * Cannot call lp_setup_flush_and_restart() directly here
  1239.        * because of potential recursion.
  1240.        */
  1241.       if (!set_scene_state(setup, SETUP_FLUSHED, __FUNCTION__))
  1242.          return FALSE;
  1243.  
  1244.       if (!set_scene_state(setup, SETUP_ACTIVE, __FUNCTION__))
  1245.          return FALSE;
  1246.  
  1247.       if (!setup->scene)
  1248.          return FALSE;
  1249.  
  1250.       return try_update_scene_state(setup);
  1251.    }
  1252.  
  1253.    return TRUE;
  1254. }
  1255.  
  1256.  
  1257.  
  1258. /* Only caller is lp_setup_vbuf_destroy()
  1259.  */
  1260. void
  1261. lp_setup_destroy( struct lp_setup_context *setup )
  1262. {
  1263.    uint i;
  1264.  
  1265.    lp_setup_reset( setup );
  1266.  
  1267.    util_unreference_framebuffer_state(&setup->fb);
  1268.  
  1269.    for (i = 0; i < Elements(setup->fs.current_tex); i++) {
  1270.       pipe_resource_reference(&setup->fs.current_tex[i], NULL);
  1271.    }
  1272.  
  1273.    for (i = 0; i < Elements(setup->constants); i++) {
  1274.       pipe_resource_reference(&setup->constants[i].current.buffer, NULL);
  1275.    }
  1276.  
  1277.    /* free the scenes in the 'empty' queue */
  1278.    for (i = 0; i < Elements(setup->scenes); i++) {
  1279.       struct lp_scene *scene = setup->scenes[i];
  1280.  
  1281.       if (scene->fence)
  1282.          lp_fence_wait(scene->fence);
  1283.  
  1284.       lp_scene_destroy(scene);
  1285.    }
  1286.  
  1287.    lp_fence_reference(&setup->last_fence, NULL);
  1288.  
  1289.    FREE( setup );
  1290. }
  1291.  
  1292.  
  1293. /**
  1294.  * Create a new primitive tiling engine.  Plug it into the backend of
  1295.  * the draw module.  Currently also creates a rasterizer to use with
  1296.  * it.
  1297.  */
  1298. struct lp_setup_context *
  1299. lp_setup_create( struct pipe_context *pipe,
  1300.                  struct draw_context *draw )
  1301. {
  1302.    struct llvmpipe_screen *screen = llvmpipe_screen(pipe->screen);
  1303.    struct lp_setup_context *setup;
  1304.    unsigned i;
  1305.  
  1306.    setup = CALLOC_STRUCT(lp_setup_context);
  1307.    if (!setup) {
  1308.       goto no_setup;
  1309.    }
  1310.  
  1311.    lp_setup_init_vbuf(setup);
  1312.    
  1313.    /* Used only in update_state():
  1314.     */
  1315.    setup->pipe = pipe;
  1316.  
  1317.  
  1318.    setup->num_threads = screen->num_threads;
  1319.    setup->vbuf = draw_vbuf_stage(draw, &setup->base);
  1320.    if (!setup->vbuf) {
  1321.       goto no_vbuf;
  1322.    }
  1323.  
  1324.    draw_set_rasterize_stage(draw, setup->vbuf);
  1325.    draw_set_render(draw, &setup->base);
  1326.  
  1327.    /* create some empty scenes */
  1328.    for (i = 0; i < MAX_SCENES; i++) {
  1329.       setup->scenes[i] = lp_scene_create( pipe );
  1330.       if (!setup->scenes[i]) {
  1331.          goto no_scenes;
  1332.       }
  1333.    }
  1334.  
  1335.    setup->triangle = first_triangle;
  1336.    setup->line     = first_line;
  1337.    setup->point    = first_point;
  1338.    
  1339.    setup->dirty = ~0;
  1340.  
  1341.    return setup;
  1342.  
  1343. no_scenes:
  1344.    for (i = 0; i < MAX_SCENES; i++) {
  1345.       if (setup->scenes[i]) {
  1346.          lp_scene_destroy(setup->scenes[i]);
  1347.       }
  1348.    }
  1349.  
  1350.    setup->vbuf->destroy(setup->vbuf);
  1351. no_vbuf:
  1352.    FREE(setup);
  1353. no_setup:
  1354.    return NULL;
  1355. }
  1356.  
  1357.  
  1358. /**
  1359.  * Put a BeginQuery command into all bins.
  1360.  */
  1361. void
  1362. lp_setup_begin_query(struct lp_setup_context *setup,
  1363.                      struct llvmpipe_query *pq)
  1364. {
  1365.  
  1366.    set_scene_state(setup, SETUP_ACTIVE, "begin_query");
  1367.  
  1368.    if (!(pq->type == PIPE_QUERY_OCCLUSION_COUNTER ||
  1369.          pq->type == PIPE_QUERY_OCCLUSION_PREDICATE ||
  1370.          pq->type == PIPE_QUERY_PIPELINE_STATISTICS))
  1371.       return;
  1372.  
  1373.    /* init the query to its beginning state */
  1374.    assert(setup->active_binned_queries < LP_MAX_ACTIVE_BINNED_QUERIES);
  1375.    /* exceeding list size so just ignore the query */
  1376.    if (setup->active_binned_queries >= LP_MAX_ACTIVE_BINNED_QUERIES) {
  1377.       return;
  1378.    }
  1379.    assert(setup->active_queries[setup->active_binned_queries] == NULL);
  1380.    setup->active_queries[setup->active_binned_queries] = pq;
  1381.    setup->active_binned_queries++;
  1382.  
  1383.    assert(setup->scene);
  1384.    if (setup->scene) {
  1385.       if (!lp_scene_bin_everywhere(setup->scene,
  1386.                                    LP_RAST_OP_BEGIN_QUERY,
  1387.                                    lp_rast_arg_query(pq))) {
  1388.  
  1389.          if (!lp_setup_flush_and_restart(setup))
  1390.             return;
  1391.  
  1392.          if (!lp_scene_bin_everywhere(setup->scene,
  1393.                                       LP_RAST_OP_BEGIN_QUERY,
  1394.                                       lp_rast_arg_query(pq))) {
  1395.             return;
  1396.          }
  1397.       }
  1398.       setup->scene->had_queries |= TRUE;
  1399.    }
  1400. }
  1401.  
  1402.  
  1403. /**
  1404.  * Put an EndQuery command into all bins.
  1405.  */
  1406. void
  1407. lp_setup_end_query(struct lp_setup_context *setup, struct llvmpipe_query *pq)
  1408. {
  1409.    set_scene_state(setup, SETUP_ACTIVE, "end_query");
  1410.  
  1411.    assert(setup->scene);
  1412.    if (setup->scene) {
  1413.       /* pq->fence should be the fence of the *last* scene which
  1414.        * contributed to the query result.
  1415.        */
  1416.       lp_fence_reference(&pq->fence, setup->scene->fence);
  1417.  
  1418.       if (pq->type == PIPE_QUERY_OCCLUSION_COUNTER ||
  1419.           pq->type == PIPE_QUERY_OCCLUSION_PREDICATE ||
  1420.           pq->type == PIPE_QUERY_PIPELINE_STATISTICS ||
  1421.           pq->type == PIPE_QUERY_TIMESTAMP) {
  1422.          if (pq->type == PIPE_QUERY_TIMESTAMP &&
  1423.                !(setup->scene->tiles_x | setup->scene->tiles_y)) {
  1424.             /*
  1425.              * If the framebuffer has zero width/height, there are no bins and
  1426.              * hence no rast task is ever run. So fill in something here instead.
  1427.              */
  1428.             pq->end[0] = os_time_get_nano();
  1429.          }
  1430.  
  1431.          if (!lp_scene_bin_everywhere(setup->scene,
  1432.                                       LP_RAST_OP_END_QUERY,
  1433.                                       lp_rast_arg_query(pq))) {
  1434.             if (!lp_setup_flush_and_restart(setup))
  1435.                goto fail;
  1436.  
  1437.             if (!lp_scene_bin_everywhere(setup->scene,
  1438.                                          LP_RAST_OP_END_QUERY,
  1439.                                          lp_rast_arg_query(pq))) {
  1440.                goto fail;
  1441.             }
  1442.          }
  1443.          setup->scene->had_queries |= TRUE;
  1444.       }
  1445.    }
  1446.    else {
  1447.       lp_fence_reference(&pq->fence, setup->last_fence);
  1448.    }
  1449.  
  1450. fail:
  1451.    /* Need to do this now rather than earlier, since the query still needs to
  1452.     * be marked as active in case binning it causes a flush.
  1453.     */
  1454.    if (pq->type == PIPE_QUERY_OCCLUSION_COUNTER ||
  1455.       pq->type == PIPE_QUERY_OCCLUSION_PREDICATE ||
  1456.       pq->type == PIPE_QUERY_PIPELINE_STATISTICS) {
  1457.       unsigned i;
  1458.  
  1459.       /* remove from active binned query list */
  1460.       for (i = 0; i < setup->active_binned_queries; i++) {
  1461.          if (setup->active_queries[i] == pq)
  1462.             break;
  1463.       }
  1464.       assert(i < setup->active_binned_queries);
  1465.       if (i == setup->active_binned_queries)
  1466.          return;
  1467.       setup->active_binned_queries--;
  1468.       setup->active_queries[i] = setup->active_queries[setup->active_binned_queries];
  1469.       setup->active_queries[setup->active_binned_queries] = NULL;
  1470.    }
  1471. }
  1472.  
  1473.  
  1474. boolean
  1475. lp_setup_flush_and_restart(struct lp_setup_context *setup)
  1476. {
  1477.    if (0) debug_printf("%s\n", __FUNCTION__);
  1478.  
  1479.    assert(setup->state == SETUP_ACTIVE);
  1480.  
  1481.    if (!set_scene_state(setup, SETUP_FLUSHED, __FUNCTION__))
  1482.       return FALSE;
  1483.    
  1484.    if (!lp_setup_update_state(setup, TRUE))
  1485.       return FALSE;
  1486.  
  1487.    return TRUE;
  1488. }
  1489.  
  1490.  
  1491.