  1. /*
  2.  * Mesa 3-D graphics library
  3.  *
  4.  * Copyright (C) 2012-2013 LunarG, Inc.
  5.  *
  6.  * Permission is hereby granted, free of charge, to any person obtaining a
  7.  * copy of this software and associated documentation files (the "Software"),
  8.  * to deal in the Software without restriction, including without limitation
  9.  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
  10.  * and/or sell copies of the Software, and to permit persons to whom the
  11.  * Software is furnished to do so, subject to the following conditions:
  12.  *
  13.  * The above copyright notice and this permission notice shall be included
  14.  * in all copies or substantial portions of the Software.
  15.  *
  16.  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
  17.  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
  18.  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
  19.  * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
  20.  * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
  21.  * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
  22.  * DEALINGS IN THE SOFTWARE.
  23.  *
  24.  * Authors:
  25.  *    Chia-I Wu <olv@lunarg.com>
  26.  */
  27.  
  28. #include "core/ilo_state_3d.h"
  29. #include "util/u_dynarray.h"
  30. #include "util/u_helpers.h"
  31. #include "util/u_upload_mgr.h"
  32.  
  33. #include "ilo_context.h"
  34. #include "ilo_resource.h"
  35. #include "ilo_shader.h"
  36. #include "ilo_state.h"
  37.  
  38. static void
  39. finalize_shader_states(struct ilo_state_vector *vec)
  40. {
  41.    unsigned type;
  42.  
  43.    for (type = 0; type < PIPE_SHADER_TYPES; type++) {
  44.       struct ilo_shader_state *shader;
  45.       uint32_t state;
  46.  
  47.       switch (type) {
  48.       case PIPE_SHADER_VERTEX:
  49.          shader = vec->vs;
  50.          state = ILO_DIRTY_VS;
  51.          break;
  52.       case PIPE_SHADER_GEOMETRY:
  53.          shader = vec->gs;
  54.          state = ILO_DIRTY_GS;
  55.          break;
  56.       case PIPE_SHADER_FRAGMENT:
  57.          shader = vec->fs;
  58.          state = ILO_DIRTY_FS;
  59.          break;
  60.       default:
  61.          shader = NULL;
  62.          state = 0;
  63.          break;
  64.       }
  65.  
  66.       if (!shader)
  67.          continue;
  68.  
  69.       /* compile if the shader or the states it depends on changed */
  70.       if (vec->dirty & state) {
  71.          ilo_shader_select_kernel(shader, vec, ILO_DIRTY_ALL);
  72.       }
  73.       else if (ilo_shader_select_kernel(shader, vec, vec->dirty)) {
  74.          /* mark the state dirty if a new kernel is selected */
  75.          vec->dirty |= state;
  76.       }
  77.  
  78.       /* need to set up SBE for FS */
  79.       if (type == PIPE_SHADER_FRAGMENT && vec->dirty &
  80.             (state | ILO_DIRTY_GS | ILO_DIRTY_VS | ILO_DIRTY_RASTERIZER)) {
  81.          if (ilo_shader_select_kernel_routing(shader,
  82.                (vec->gs) ? vec->gs : vec->vs, vec->rasterizer))
  83.             vec->dirty |= state;
  84.       }
  85.    }
  86. }
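
/*
 * Note on the dirty-bit handshake above: when a shader's own dirty bit is
 * already set, its kernel is re-selected against ILO_DIRTY_ALL; otherwise
 * ilo_shader_select_kernel() only looks at the states that actually
 * changed, and the shader bit is ORed in after the fact if that made a
 * different kernel the active one, so the shader-related commands get
 * re-emitted as well.
 */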
  87.  
  88. static void
  89. finalize_cbuf_state(struct ilo_context *ilo,
  90.                     struct ilo_cbuf_state *cbuf,
  91.                     const struct ilo_shader_state *sh)
  92. {
  93.    uint32_t upload_mask = cbuf->enabled_mask;
  94.  
  95.    /* skip CBUF0 if the kernel does not need it */
  96.    upload_mask &=
  97.       ~ilo_shader_get_kernel_param(sh, ILO_KERNEL_SKIP_CBUF0_UPLOAD);
  98.  
  99.    while (upload_mask) {
  100.       const enum pipe_format elem_format = PIPE_FORMAT_R32G32B32A32_FLOAT;
  101.       unsigned offset, i;
  102.  
  103.       i = u_bit_scan(&upload_mask);
  104.       /* no need to upload */
  105.       if (cbuf->cso[i].resource)
  106.          continue;
  107.  
  108.       u_upload_data(ilo->uploader, 0, cbuf->cso[i].user_buffer_size,
  109.             cbuf->cso[i].user_buffer, &offset, &cbuf->cso[i].resource);
  110.  
  111.       ilo_gpe_init_view_surface_for_buffer(ilo->dev,
  112.             ilo_buffer(cbuf->cso[i].resource),
  113.             offset, cbuf->cso[i].user_buffer_size,
  114.             util_format_get_blocksize(elem_format), elem_format,
  115.             false, false, &cbuf->cso[i].surface);
  116.  
  117.       ilo->state_vector.dirty |= ILO_DIRTY_CBUF;
  118.    }
  119. }
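
/*
 * A minimal sketch of the u_bit_scan() walk used above (illustrative only,
 * not driver code): u_bit_scan() returns the index of the lowest set bit
 * and clears it from the mask, so each bit of upload_mask is visited
 * exactly once; entries that already have a backing resource are skipped
 * by the "continue" above.
 */
#if 0
static void
example_upload_mask_walk(void)
{
   unsigned mask = 0x5;                       /* CBUF0 and CBUF2 enabled */

   while (mask) {
      const unsigned i = u_bit_scan(&mask);   /* yields 0, then 2 */
      /* upload cbuf->cso[i] here */
   }
}
#endif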
  120.  
  121. static void
  122. finalize_constant_buffers(struct ilo_context *ilo)
  123. {
  124.    struct ilo_state_vector *vec = &ilo->state_vector;
  125.  
  126.    if (vec->dirty & (ILO_DIRTY_CBUF | ILO_DIRTY_VS))
  127.       finalize_cbuf_state(ilo, &vec->cbuf[PIPE_SHADER_VERTEX], vec->vs);
  128.  
  129.    if (vec->dirty & (ILO_DIRTY_CBUF | ILO_DIRTY_FS))
  130.       finalize_cbuf_state(ilo, &vec->cbuf[PIPE_SHADER_FRAGMENT], vec->fs);
  131. }
  132.  
  133. static void
  134. finalize_index_buffer(struct ilo_context *ilo)
  135. {
  136.    struct ilo_state_vector *vec = &ilo->state_vector;
  137.    const bool need_upload = (vec->draw->indexed &&
  138.          (vec->ib.user_buffer || vec->ib.offset % vec->ib.index_size));
  139.    struct pipe_resource *current_hw_res = NULL;
  140.  
  141.    if (!(vec->dirty & ILO_DIRTY_IB) && !need_upload)
  142.       return;
  143.  
  144.    pipe_resource_reference(&current_hw_res, vec->ib.hw_resource);
  145.  
  146.    if (need_upload) {
  147.       const unsigned offset = vec->ib.index_size * vec->draw->start;
  148.       const unsigned size = vec->ib.index_size * vec->draw->count;
  149.       unsigned hw_offset;
  150.  
  151.       if (vec->ib.user_buffer) {
  152.          u_upload_data(ilo->uploader, 0, size,
  153.                vec->ib.user_buffer + offset, &hw_offset, &vec->ib.hw_resource);
  154.       }
  155.       else {
  156.          u_upload_buffer(ilo->uploader, 0, vec->ib.offset + offset, size,
  157.                vec->ib.buffer, &hw_offset, &vec->ib.hw_resource);
  158.       }
  159.  
  160.       /* the HW offset should be aligned */
  161.       assert(hw_offset % vec->ib.index_size == 0);
  162.       vec->ib.draw_start_offset = hw_offset / vec->ib.index_size;
  163.  
  164.       /*
  165.        * INDEX[vec->draw->start] in the original buffer is INDEX[0] in the HW
  166.        * resource
  167.        */
  168.       vec->ib.draw_start_offset -= vec->draw->start;
  169.    }
  170.    else {
  171.       pipe_resource_reference(&vec->ib.hw_resource, vec->ib.buffer);
  172.  
  173.       /* note that index size may be zero when the draw is not indexed */
  174.       if (vec->draw->indexed)
  175.          vec->ib.draw_start_offset = vec->ib.offset / vec->ib.index_size;
  176.       else
  177.          vec->ib.draw_start_offset = 0;
  178.    }
  179.  
  180.    /* treat the IB as clean if the HW states do not change */
  181.    if (vec->ib.hw_resource == current_hw_res &&
  182.        vec->ib.hw_index_size == vec->ib.index_size)
  183.       vec->dirty &= ~ILO_DIRTY_IB;
  184.    else
  185.       vec->ib.hw_index_size = vec->ib.index_size;
  186.  
  187.    pipe_resource_reference(&current_hw_res, NULL);
  188. }
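
/*
 * Worked example for the upload path above (all numbers hypothetical):
 * with index_size = 2, draw->start = 10 and draw->count = 100, bytes
 * [20, 220) of the user index buffer are uploaded.  If the uploader places
 * them at hw_offset = 64, then draw_start_offset = 64 / 2 - 10 = 22, so
 * INDEX[draw->start] of the original buffer is found at
 * INDEX[draw_start_offset + draw->start] = INDEX[32] of the HW resource,
 * which is exactly where the uploaded data begins.
 */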
  189.  
  190. static void
  191. finalize_vertex_elements(struct ilo_context *ilo)
  192. {
  193.    struct ilo_state_vector *vec = &ilo->state_vector;
  194.  
  195.    if (!(vec->dirty & (ILO_DIRTY_VE | ILO_DIRTY_VS)))
  196.       return;
  197.  
  198.    vec->dirty |= ILO_DIRTY_VE;
  199.  
  200.    vec->ve->last_cso_edgeflag = false;
  201.    if (vec->ve->count && vec->vs &&
  202.          ilo_shader_get_kernel_param(vec->vs, ILO_KERNEL_VS_INPUT_EDGEFLAG)) {
  203.       vec->ve->edgeflag_cso = vec->ve->cso[vec->ve->count - 1];
  204.       ilo_gpe_set_ve_edgeflag(ilo->dev, &vec->ve->edgeflag_cso);
  205.       vec->ve->last_cso_edgeflag = true;
  206.    }
  207.  
  208.    vec->ve->prepend_nosrc_cso = false;
  209.    if (vec->vs &&
  210.        (ilo_shader_get_kernel_param(vec->vs,
  211.                                     ILO_KERNEL_VS_INPUT_INSTANCEID) ||
  212.         ilo_shader_get_kernel_param(vec->vs,
  213.                                     ILO_KERNEL_VS_INPUT_VERTEXID))) {
  214.       ilo_gpe_init_ve_nosrc(ilo->dev,
  215.             GEN6_VFCOMP_STORE_VID,
  216.             GEN6_VFCOMP_STORE_IID,
  217.             GEN6_VFCOMP_NOSTORE,
  218.             GEN6_VFCOMP_NOSTORE,
  219.             &vec->ve->nosrc_cso);
  220.       vec->ve->prepend_nosrc_cso = true;
  221.    } else if (!vec->vs) {
  222.       /* generate VUE header */
  223.       ilo_gpe_init_ve_nosrc(ilo->dev,
  224.             GEN6_VFCOMP_STORE_0, /* Reserved */
  225.             GEN6_VFCOMP_STORE_0, /* Render Target Array Index */
  226.             GEN6_VFCOMP_STORE_0, /* Viewport Index */
  227.             GEN6_VFCOMP_STORE_0, /* Point Width */
  228.             &vec->ve->nosrc_cso);
  229.       vec->ve->prepend_nosrc_cso = true;
  230.    } else if (!vec->ve->count) {
  231.       /*
  232.        * From the Sandy Bridge PRM, volume 2 part 1, page 92:
  233.        *
  234.        *    "SW must ensure that at least one vertex element is defined prior
  235.        *     to issuing a 3DPRIMITIVE command, or operation is UNDEFINED."
  236.        */
  237.       ilo_gpe_init_ve_nosrc(ilo->dev,
  238.             GEN6_VFCOMP_STORE_0,
  239.             GEN6_VFCOMP_STORE_0,
  240.             GEN6_VFCOMP_STORE_0,
  241.             GEN6_VFCOMP_STORE_1_FP,
  242.             &vec->ve->nosrc_cso);
  243.       vec->ve->prepend_nosrc_cso = true;
  244.    }
  245. }
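
/*
 * Summary of the synthetic vertex elements prepared above: the last
 * element is emitted in an edge-flag variant when the VS consumes edge
 * flags, and a "nosrc" element (one that stores generated IDs or constants
 * instead of fetching from a vertex buffer) is prepended when the VS reads
 * InstanceID/VertexID, when no VS is bound (a VUE header is generated
 * instead), or when no vertex element is defined at all, as required by
 * the PRM quote above.
 */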
  246.  
  247. /**
  248.  * Finalize states.  Some states depend on other states and are
  249.  * incomplete/invalid until finalized.
  250.  */
  251. void
  252. ilo_finalize_3d_states(struct ilo_context *ilo,
  253.                        const struct pipe_draw_info *draw)
  254. {
  255.    ilo->state_vector.draw = draw;
  256.  
  257.    finalize_shader_states(&ilo->state_vector);
  258.    finalize_constant_buffers(ilo);
  259.    finalize_index_buffer(ilo);
  260.    finalize_vertex_elements(ilo);
  261.  
  262.    u_upload_unmap(ilo->uploader);
  263. }
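
/*
 * A minimal sketch of how a draw entry point is expected to use the helper
 * above (the function name and the emit step are illustrative, not the
 * actual draw code):
 */
#if 0
static void
example_draw_vbo(struct pipe_context *pipe, const struct pipe_draw_info *info)
{
   struct ilo_context *ilo = ilo_context(pipe);

   /* resolve inter-state dependencies before any commands are emitted */
   ilo_finalize_3d_states(ilo, info);

   /* ... emit the hardware states and the 3DPRIMITIVE command ... */
}
#endif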
  264.  
  265. static void
  266. finalize_global_binding(struct ilo_state_vector *vec)
  267. {
  268.    struct ilo_shader_state *cs = vec->cs;
  269.    int base, count, shift;
  270.    int i;
  271.  
  272.    count = ilo_shader_get_kernel_param(cs,
  273.          ILO_KERNEL_CS_SURFACE_GLOBAL_COUNT);
  274.    if (!count)
  275.       return;
  276.  
  277.    base = ilo_shader_get_kernel_param(cs, ILO_KERNEL_CS_SURFACE_GLOBAL_BASE);
  278.    shift = 32 - util_last_bit(base + count - 1);
  279.  
  280.    if (count > vec->global_binding.count)
  281.       count = vec->global_binding.count;
  282.  
  283.    for (i = 0; i < count; i++) {
  284.       struct ilo_global_binding_cso *cso =
  285.          util_dynarray_element(&vec->global_binding.bindings,
  286.                struct ilo_global_binding_cso, i);
  287.       const uint32_t offset = *cso->handle & ((1 << shift) - 1);
  288.  
  289.       *cso->handle = ((base + i) << shift) | offset;
  290.    }
  291. }
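
/*
 * Worked example for the handle packing above (numbers hypothetical):
 * with base = 0 and count = 4, util_last_bit(3) = 2 and shift = 30, so the
 * low 30 bits of each handle keep the original byte offset while the
 * surface index (base + i) is packed into the top bits; a handle holding
 * offset 0x100 for binding i = 2 becomes (2 << 30) | 0x100.
 */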
  292.  
  293. void
  294. ilo_finalize_compute_states(struct ilo_context *ilo)
  295. {
  296.    finalize_global_binding(&ilo->state_vector);
  297. }
  298.  
  299. static void *
  300. ilo_create_blend_state(struct pipe_context *pipe,
  301.                        const struct pipe_blend_state *state)
  302. {
  303.    const struct ilo_dev *dev = ilo_context(pipe)->dev;
  304.    struct ilo_blend_state *blend;
  305.  
  306.    blend = MALLOC_STRUCT(ilo_blend_state);
  307.    assert(blend);
  308.  
  309.    ilo_gpe_init_blend(dev, state, blend);
  310.  
  311.    return blend;
  312. }
  313.  
  314. static void
  315. ilo_bind_blend_state(struct pipe_context *pipe, void *state)
  316. {
  317.    struct ilo_state_vector *vec = &ilo_context(pipe)->state_vector;
  318.  
  319.    vec->blend = state;
  320.  
  321.    vec->dirty |= ILO_DIRTY_BLEND;
  322. }
  323.  
  324. static void
  325. ilo_delete_blend_state(struct pipe_context *pipe, void *state)
  326. {
  327.    FREE(state);
  328. }
  329.  
  330. static void *
  331. ilo_create_sampler_state(struct pipe_context *pipe,
  332.                          const struct pipe_sampler_state *state)
  333. {
  334.    const struct ilo_dev *dev = ilo_context(pipe)->dev;
  335.    struct ilo_sampler_cso *sampler;
  336.  
  337.    sampler = MALLOC_STRUCT(ilo_sampler_cso);
  338.    assert(sampler);
  339.  
  340.    ilo_gpe_init_sampler_cso(dev, state, sampler);
  341.  
  342.    return sampler;
  343. }
  344.  
  345. static void
  346. ilo_bind_sampler_states(struct pipe_context *pipe, unsigned shader,
  347.                         unsigned start, unsigned count, void **samplers)
  348. {
  349.    struct ilo_state_vector *vec = &ilo_context(pipe)->state_vector;
  350.    struct ilo_sampler_state *dst = &vec->sampler[shader];
  351.    bool changed = false;
  352.    unsigned i;
  353.  
  354.    assert(start + count <= Elements(dst->cso));
  355.  
  356.    if (samplers) {
  357.       for (i = 0; i < count; i++) {
  358.          if (dst->cso[start + i] != samplers[i]) {
  359.             dst->cso[start + i] = samplers[i];
  360.  
  361.             /*
  362.              * This function is sometimes called to reduce the number of bound
  363.              * samplers.  Do not consider that as a state change (and create a
  364.              * new array of SAMPLER_STATE).
  365.              */
  366.             if (samplers[i])
  367.                changed = true;
  368.          }
  369.       }
  370.    }
  371.    else {
  372.       for (i = 0; i < count; i++)
  373.          dst->cso[start + i] = NULL;
  374.    }
  375.  
  376.    if (changed) {
  377.       switch (shader) {
  378.       case PIPE_SHADER_VERTEX:
  379.          vec->dirty |= ILO_DIRTY_SAMPLER_VS;
  380.          break;
  381.       case PIPE_SHADER_GEOMETRY:
  382.          vec->dirty |= ILO_DIRTY_SAMPLER_GS;
  383.          break;
  384.       case PIPE_SHADER_FRAGMENT:
  385.          vec->dirty |= ILO_DIRTY_SAMPLER_FS;
  386.          break;
  387.       case PIPE_SHADER_COMPUTE:
  388.          vec->dirty |= ILO_DIRTY_SAMPLER_CS;
  389.          break;
  390.       }
  391.    }
  392. }
  393.  
  394. static void
  395. ilo_delete_sampler_state(struct pipe_context *pipe, void *state)
  396. {
  397.    FREE(state);
  398. }
  399.  
  400. static void *
  401. ilo_create_rasterizer_state(struct pipe_context *pipe,
  402.                             const struct pipe_rasterizer_state *state)
  403. {
  404.    const struct ilo_dev *dev = ilo_context(pipe)->dev;
  405.    struct ilo_rasterizer_state *rast;
  406.  
  407.    rast = MALLOC_STRUCT(ilo_rasterizer_state);
  408.    assert(rast);
  409.  
  410.    rast->state = *state;
  411.    ilo_gpe_init_rasterizer(dev, state, rast);
  412.  
  413.    return rast;
  414. }
  415.  
  416. static void
  417. ilo_bind_rasterizer_state(struct pipe_context *pipe, void *state)
  418. {
  419.    struct ilo_state_vector *vec = &ilo_context(pipe)->state_vector;
  420.  
  421.    vec->rasterizer = state;
  422.  
  423.    vec->dirty |= ILO_DIRTY_RASTERIZER;
  424. }
  425.  
  426. static void
  427. ilo_delete_rasterizer_state(struct pipe_context *pipe, void *state)
  428. {
  429.    FREE(state);
  430. }
  431.  
  432. static void *
  433. ilo_create_depth_stencil_alpha_state(struct pipe_context *pipe,
  434.                                      const struct pipe_depth_stencil_alpha_state *state)
  435. {
  436.    const struct ilo_dev *dev = ilo_context(pipe)->dev;
  437.    struct ilo_dsa_state *dsa;
  438.  
  439.    dsa = MALLOC_STRUCT(ilo_dsa_state);
  440.    assert(dsa);
  441.  
  442.    ilo_gpe_init_dsa(dev, state, dsa);
  443.  
  444.    return dsa;
  445. }
  446.  
  447. static void
  448. ilo_bind_depth_stencil_alpha_state(struct pipe_context *pipe, void *state)
  449. {
  450.    struct ilo_state_vector *vec = &ilo_context(pipe)->state_vector;
  451.  
  452.    vec->dsa = state;
  453.  
  454.    vec->dirty |= ILO_DIRTY_DSA;
  455. }
  456.  
  457. static void
  458. ilo_delete_depth_stencil_alpha_state(struct pipe_context *pipe, void *state)
  459. {
  460.    FREE(state);
  461. }
  462.  
  463. static void *
  464. ilo_create_fs_state(struct pipe_context *pipe,
  465.                     const struct pipe_shader_state *state)
  466. {
  467.    struct ilo_context *ilo = ilo_context(pipe);
  468.    struct ilo_shader_state *shader;
  469.  
  470.    shader = ilo_shader_create_fs(ilo->dev, state, &ilo->state_vector);
  471.    assert(shader);
  472.  
  473.    ilo_shader_cache_add(ilo->shader_cache, shader);
  474.  
  475.    return shader;
  476. }
  477.  
  478. static void
  479. ilo_bind_fs_state(struct pipe_context *pipe, void *state)
  480. {
  481.    struct ilo_state_vector *vec = &ilo_context(pipe)->state_vector;
  482.  
  483.    vec->fs = state;
  484.  
  485.    vec->dirty |= ILO_DIRTY_FS;
  486. }
  487.  
  488. static void
  489. ilo_delete_fs_state(struct pipe_context *pipe, void *state)
  490. {
  491.    struct ilo_context *ilo = ilo_context(pipe);
  492.    struct ilo_shader_state *fs = (struct ilo_shader_state *) state;
  493.  
  494.    ilo_shader_cache_remove(ilo->shader_cache, fs);
  495.    ilo_shader_destroy(fs);
  496. }
  497.  
  498. static void *
  499. ilo_create_vs_state(struct pipe_context *pipe,
  500.                     const struct pipe_shader_state *state)
  501. {
  502.    struct ilo_context *ilo = ilo_context(pipe);
  503.    struct ilo_shader_state *shader;
  504.  
  505.    shader = ilo_shader_create_vs(ilo->dev, state, &ilo->state_vector);
  506.    assert(shader);
  507.  
  508.    ilo_shader_cache_add(ilo->shader_cache, shader);
  509.  
  510.    return shader;
  511. }
  512.  
  513. static void
  514. ilo_bind_vs_state(struct pipe_context *pipe, void *state)
  515. {
  516.    struct ilo_state_vector *vec = &ilo_context(pipe)->state_vector;
  517.  
  518.    vec->vs = state;
  519.  
  520.    vec->dirty |= ILO_DIRTY_VS;
  521. }
  522.  
  523. static void
  524. ilo_delete_vs_state(struct pipe_context *pipe, void *state)
  525. {
  526.    struct ilo_context *ilo = ilo_context(pipe);
  527.    struct ilo_shader_state *vs = (struct ilo_shader_state *) state;
  528.  
  529.    ilo_shader_cache_remove(ilo->shader_cache, vs);
  530.    ilo_shader_destroy(vs);
  531. }
  532.  
  533. static void *
  534. ilo_create_gs_state(struct pipe_context *pipe,
  535.                     const struct pipe_shader_state *state)
  536. {
  537.    struct ilo_context *ilo = ilo_context(pipe);
  538.    struct ilo_shader_state *shader;
  539.  
  540.    shader = ilo_shader_create_gs(ilo->dev, state, &ilo->state_vector);
  541.    assert(shader);
  542.  
  543.    ilo_shader_cache_add(ilo->shader_cache, shader);
  544.  
  545.    return shader;
  546. }
  547.  
  548. static void
  549. ilo_bind_gs_state(struct pipe_context *pipe, void *state)
  550. {
  551.    struct ilo_state_vector *vec = &ilo_context(pipe)->state_vector;
  552.  
  553.    /* util_blitter may set this unnecessarily */
  554.    if (vec->gs == state)
  555.       return;
  556.  
  557.    vec->gs = state;
  558.  
  559.    vec->dirty |= ILO_DIRTY_GS;
  560. }
  561.  
  562. static void
  563. ilo_delete_gs_state(struct pipe_context *pipe, void *state)
  564. {
  565.    struct ilo_context *ilo = ilo_context(pipe);
  566.    struct ilo_shader_state *gs = (struct ilo_shader_state *) state;
  567.  
  568.    ilo_shader_cache_remove(ilo->shader_cache, gs);
  569.    ilo_shader_destroy(gs);
  570. }
  571.  
  572. static void *
  573. ilo_create_vertex_elements_state(struct pipe_context *pipe,
  574.                                  unsigned num_elements,
  575.                                  const struct pipe_vertex_element *elements)
  576. {
  577.    const struct ilo_dev *dev = ilo_context(pipe)->dev;
  578.    struct ilo_ve_state *ve;
  579.  
  580.    ve = MALLOC_STRUCT(ilo_ve_state);
  581.    assert(ve);
  582.  
  583.    ilo_gpe_init_ve(dev, num_elements, elements, ve);
  584.  
  585.    return ve;
  586. }
  587.  
  588. static void
  589. ilo_bind_vertex_elements_state(struct pipe_context *pipe, void *state)
  590. {
  591.    struct ilo_state_vector *vec = &ilo_context(pipe)->state_vector;
  592.  
  593.    vec->ve = state;
  594.  
  595.    vec->dirty |= ILO_DIRTY_VE;
  596. }
  597.  
  598. static void
  599. ilo_delete_vertex_elements_state(struct pipe_context *pipe, void *state)
  600. {
  601.    struct ilo_ve_state *ve = state;
  602.  
  603.    FREE(ve);
  604. }
  605.  
  606. static void
  607. ilo_set_blend_color(struct pipe_context *pipe,
  608.                     const struct pipe_blend_color *state)
  609. {
  610.    struct ilo_state_vector *vec = &ilo_context(pipe)->state_vector;
  611.  
  612.    vec->blend_color = *state;
  613.  
  614.    vec->dirty |= ILO_DIRTY_BLEND_COLOR;
  615. }
  616.  
  617. static void
  618. ilo_set_stencil_ref(struct pipe_context *pipe,
  619.                     const struct pipe_stencil_ref *state)
  620. {
  621.    struct ilo_state_vector *vec = &ilo_context(pipe)->state_vector;
  622.  
  623.    /* util_blitter may set this unnecessarily */
  624.    if (!memcmp(&vec->stencil_ref, state, sizeof(*state)))
  625.       return;
  626.  
  627.    vec->stencil_ref = *state;
  628.  
  629.    vec->dirty |= ILO_DIRTY_STENCIL_REF;
  630. }
  631.  
  632. static void
  633. ilo_set_sample_mask(struct pipe_context *pipe,
  634.                     unsigned sample_mask)
  635. {
  636.    struct ilo_state_vector *vec = &ilo_context(pipe)->state_vector;
  637.  
  638.    /* util_blitter may set this unnecessarily */
  639.    if (vec->sample_mask == sample_mask)
  640.       return;
  641.  
  642.    vec->sample_mask = sample_mask;
  643.  
  644.    vec->dirty |= ILO_DIRTY_SAMPLE_MASK;
  645. }
  646.  
  647. static void
  648. ilo_set_clip_state(struct pipe_context *pipe,
  649.                    const struct pipe_clip_state *state)
  650. {
  651.    struct ilo_state_vector *vec = &ilo_context(pipe)->state_vector;
  652.  
  653.    vec->clip = *state;
  654.  
  655.    vec->dirty |= ILO_DIRTY_CLIP;
  656. }
  657.  
  658. static void
  659. ilo_set_constant_buffer(struct pipe_context *pipe,
  660.                         uint shader, uint index,
  661.                         struct pipe_constant_buffer *buf)
  662. {
  663.    const struct ilo_dev *dev = ilo_context(pipe)->dev;
  664.    struct ilo_state_vector *vec = &ilo_context(pipe)->state_vector;
  665.    struct ilo_cbuf_state *cbuf = &vec->cbuf[shader];
  666.    const unsigned count = 1;
  667.    unsigned i;
  668.  
  669.    assert(shader < Elements(vec->cbuf));
  670.    assert(index + count <= Elements(vec->cbuf[shader].cso));
  671.  
  672.    if (buf) {
  673.       for (i = 0; i < count; i++) {
  674.          struct ilo_cbuf_cso *cso = &cbuf->cso[index + i];
  675.  
  676.          pipe_resource_reference(&cso->resource, buf[i].buffer);
  677.  
  678.          if (buf[i].buffer) {
  679.             const enum pipe_format elem_format =
  680.                PIPE_FORMAT_R32G32B32A32_FLOAT;
  681.  
  682.             ilo_gpe_init_view_surface_for_buffer(dev,
  683.                   ilo_buffer(buf[i].buffer),
  684.                   buf[i].buffer_offset, buf[i].buffer_size,
  685.                   util_format_get_blocksize(elem_format), elem_format,
  686.                   false, false, &cso->surface);
  687.  
  688.             cso->user_buffer = NULL;
  689.             cso->user_buffer_size = 0;
  690.  
  691.             cbuf->enabled_mask |= 1 << (index + i);
  692.          }
  693.          else if (buf[i].user_buffer) {
  694.             cso->surface.bo = NULL;
  695.  
  696.             /* buffer_offset does not apply for user buffer */
  697.             cso->user_buffer = buf[i].user_buffer;
  698.             cso->user_buffer_size = buf[i].buffer_size;
  699.  
  700.             cbuf->enabled_mask |= 1 << (index + i);
  701.          }
  702.          else {
  703.             cso->surface.bo = NULL;
  704.             cso->user_buffer = NULL;
  705.             cso->user_buffer_size = 0;
  706.  
  707.             cbuf->enabled_mask &= ~(1 << (index + i));
  708.          }
  709.       }
  710.    }
  711.    else {
  712.       for (i = 0; i < count; i++) {
  713.          struct ilo_cbuf_cso *cso = &cbuf->cso[index + i];
  714.  
  715.          pipe_resource_reference(&cso->resource, NULL);
  716.          cso->surface.bo = NULL;
  717.          cso->user_buffer = NULL;
  718.          cso->user_buffer_size = 0;
  719.  
  720.          cbuf->enabled_mask &= ~(1 << (index + i));
  721.       }
  722.    }
  723.  
  724.    vec->dirty |= ILO_DIRTY_CBUF;
  725. }
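
/*
 * Note on the bookkeeping above: enabled_mask tracks which constant buffer
 * slots are bound.  A user buffer leaves cso->resource and cso->surface.bo
 * NULL here; that is exactly the condition finalize_cbuf_state() looks for
 * when it uploads user buffers right before a draw.  For example, binding
 * a user buffer at index 1 sets bit 1 and defers the upload, while binding
 * NULL clears the bit again.
 */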
  726.  
  727. static void
  728. ilo_set_framebuffer_state(struct pipe_context *pipe,
  729.                           const struct pipe_framebuffer_state *state)
  730. {
  731.    const struct ilo_dev *dev = ilo_context(pipe)->dev;
  732.    struct ilo_state_vector *vec = &ilo_context(pipe)->state_vector;
  733.  
  734.    ilo_gpe_set_fb(dev, state, &vec->fb);
  735.  
  736.    vec->dirty |= ILO_DIRTY_FB;
  737. }
  738.  
  739. static void
  740. ilo_set_polygon_stipple(struct pipe_context *pipe,
  741.                         const struct pipe_poly_stipple *state)
  742. {
  743.    struct ilo_state_vector *vec = &ilo_context(pipe)->state_vector;
  744.  
  745.    vec->poly_stipple = *state;
  746.  
  747.    vec->dirty |= ILO_DIRTY_POLY_STIPPLE;
  748. }
  749.  
  750. static void
  751. ilo_set_scissor_states(struct pipe_context *pipe,
  752.                        unsigned start_slot,
  753.                        unsigned num_scissors,
  754.                        const struct pipe_scissor_state *scissors)
  755. {
  756.    const struct ilo_dev *dev = ilo_context(pipe)->dev;
  757.    struct ilo_state_vector *vec = &ilo_context(pipe)->state_vector;
  758.  
  759.    ilo_gpe_set_scissor(dev, start_slot, num_scissors,
  760.          scissors, &vec->scissor);
  761.  
  762.    vec->dirty |= ILO_DIRTY_SCISSOR;
  763. }
  764.  
  765. static void
  766. ilo_set_viewport_states(struct pipe_context *pipe,
  767.                         unsigned start_slot,
  768.                         unsigned num_viewports,
  769.                         const struct pipe_viewport_state *viewports)
  770. {
  771.    const struct ilo_dev *dev = ilo_context(pipe)->dev;
  772.    struct ilo_state_vector *vec = &ilo_context(pipe)->state_vector;
  773.  
  774.    if (viewports) {
  775.       unsigned i;
  776.  
  777.       for (i = 0; i < num_viewports; i++) {
  778.          ilo_gpe_set_viewport_cso(dev, &viewports[i],
  779.                &vec->viewport.cso[start_slot + i]);
  780.       }
  781.  
  782.       if (vec->viewport.count < start_slot + num_viewports)
  783.          vec->viewport.count = start_slot + num_viewports;
  784.  
  785.       /* need to save viewport 0 for util_blitter */
  786.       if (!start_slot && num_viewports)
  787.          vec->viewport.viewport0 = viewports[0];
  788.    }
  789.    else {
  790.       if (vec->viewport.count <= start_slot + num_viewports &&
  791.           vec->viewport.count > start_slot)
  792.          vec->viewport.count = start_slot;
  793.    }
  794.  
  795.    vec->dirty |= ILO_DIRTY_VIEWPORT;
  796. }
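
/*
 * The viewport0 copy saved above is presumably what gets handed back to
 * u_blitter (util_blitter_save_viewport() takes a single
 * pipe_viewport_state) when state is saved around blitter operations; the
 * per-slot CSOs are already in hardware format and are not suitable for
 * that.
 */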
  797.  
  798. static void
  799. ilo_set_sampler_views(struct pipe_context *pipe, unsigned shader,
  800.                       unsigned start, unsigned count,
  801.                       struct pipe_sampler_view **views)
  802. {
  803.    struct ilo_state_vector *vec = &ilo_context(pipe)->state_vector;
  804.    struct ilo_view_state *dst = &vec->view[shader];
  805.    unsigned i;
  806.  
  807.    assert(start + count <= Elements(dst->states));
  808.  
  809.    if (views) {
  810.       for (i = 0; i < count; i++)
  811.          pipe_sampler_view_reference(&dst->states[start + i], views[i]);
  812.    }
  813.    else {
  814.       for (i = 0; i < count; i++)
  815.          pipe_sampler_view_reference(&dst->states[start + i], NULL);
  816.    }
  817.  
  818.    if (dst->count <= start + count) {
  819.       if (views)
  820.          count += start;
  821.       else
  822.          count = start;
  823.  
  824.       while (count > 0 && !dst->states[count - 1])
  825.          count--;
  826.  
  827.       dst->count = count;
  828.    }
  829.  
  830.    switch (shader) {
  831.    case PIPE_SHADER_VERTEX:
  832.       vec->dirty |= ILO_DIRTY_VIEW_VS;
  833.       break;
  834.    case PIPE_SHADER_GEOMETRY:
  835.       vec->dirty |= ILO_DIRTY_VIEW_GS;
  836.       break;
  837.    case PIPE_SHADER_FRAGMENT:
  838.       vec->dirty |= ILO_DIRTY_VIEW_FS;
  839.       break;
  840.    case PIPE_SHADER_COMPUTE:
  841.       vec->dirty |= ILO_DIRTY_VIEW_CS;
  842.       break;
  843.    }
  844. }
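
/*
 * Worked example of the count trimming above (values hypothetical): with
 * dst->count == 2, a call with start == 1, count == 3 and
 * views == { v, NULL, NULL } stores v at slot 1, clears slots 2 and 3,
 * and then walks back over the trailing NULLs so dst->count stays 2.  The
 * same trimming idiom is used for shader and compute resources below.
 */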
  845.  
  846. static void
  847. ilo_set_shader_resources(struct pipe_context *pipe,
  848.                          unsigned start, unsigned count,
  849.                          struct pipe_surface **surfaces)
  850. {
  851.    struct ilo_state_vector *vec = &ilo_context(pipe)->state_vector;
  852.    struct ilo_resource_state *dst = &vec->resource;
  853.    unsigned i;
  854.  
  855.    assert(start + count <= Elements(dst->states));
  856.  
  857.    if (surfaces) {
  858.       for (i = 0; i < count; i++)
  859.          pipe_surface_reference(&dst->states[start + i], surfaces[i]);
  860.    }
  861.    else {
  862.       for (i = 0; i < count; i++)
  863.          pipe_surface_reference(&dst->states[start + i], NULL);
  864.    }
  865.  
  866.    if (dst->count <= start + count) {
  867.       if (surfaces)
  868.          count += start;
  869.       else
  870.          count = start;
  871.  
  872.       while (count > 0 && !dst->states[count - 1])
  873.          count--;
  874.  
  875.       dst->count = count;
  876.    }
  877.  
  878.    vec->dirty |= ILO_DIRTY_RESOURCE;
  879. }
  880.  
  881. static void
  882. ilo_set_vertex_buffers(struct pipe_context *pipe,
  883.                        unsigned start_slot, unsigned num_buffers,
  884.                        const struct pipe_vertex_buffer *buffers)
  885. {
  886.    struct ilo_state_vector *vec = &ilo_context(pipe)->state_vector;
  887.    unsigned i;
  888.  
  889.    /* no PIPE_CAP_USER_VERTEX_BUFFERS */
  890.    if (buffers) {
  891.       for (i = 0; i < num_buffers; i++)
  892.          assert(!buffers[i].user_buffer);
  893.    }
  894.  
  895.    util_set_vertex_buffers_mask(vec->vb.states,
  896.          &vec->vb.enabled_mask, buffers, start_slot, num_buffers);
  897.  
  898.    vec->dirty |= ILO_DIRTY_VB;
  899. }
  900.  
  901. static void
  902. ilo_set_index_buffer(struct pipe_context *pipe,
  903.                      const struct pipe_index_buffer *state)
  904. {
  905.    struct ilo_state_vector *vec = &ilo_context(pipe)->state_vector;
  906.  
  907.    if (state) {
  908.       pipe_resource_reference(&vec->ib.buffer, state->buffer);
  909.       vec->ib.user_buffer = state->user_buffer;
  910.       vec->ib.offset = state->offset;
  911.       vec->ib.index_size = state->index_size;
  912.    }
  913.    else {
  914.       pipe_resource_reference(&vec->ib.buffer, NULL);
  915.       vec->ib.user_buffer = NULL;
  916.       vec->ib.offset = 0;
  917.       vec->ib.index_size = 0;
  918.    }
  919.  
  920.    vec->dirty |= ILO_DIRTY_IB;
  921. }
  922.  
  923. static struct pipe_stream_output_target *
  924. ilo_create_stream_output_target(struct pipe_context *pipe,
  925.                                 struct pipe_resource *res,
  926.                                 unsigned buffer_offset,
  927.                                 unsigned buffer_size)
  928. {
  929.    struct pipe_stream_output_target *target;
  930.  
  931.    target = MALLOC_STRUCT(pipe_stream_output_target);
  932.    assert(target);
  933.  
  934.    pipe_reference_init(&target->reference, 1);
  935.    target->buffer = NULL;
  936.    pipe_resource_reference(&target->buffer, res);
  937.    target->context = pipe;
  938.    target->buffer_offset = buffer_offset;
  939.    target->buffer_size = buffer_size;
  940.  
  941.    return target;
  942. }
  943.  
  944. static void
  945. ilo_set_stream_output_targets(struct pipe_context *pipe,
  946.                               unsigned num_targets,
  947.                               struct pipe_stream_output_target **targets,
  948.                               const unsigned *offset)
  949. {
  950.    struct ilo_state_vector *vec = &ilo_context(pipe)->state_vector;
  951.    unsigned i;
  952.    unsigned append_bitmask = 0;
  953.  
  954.    if (!targets)
  955.       num_targets = 0;
  956.  
  957.    /* util_blitter may set this unnecessarily */
  958.    if (!vec->so.count && !num_targets)
  959.       return;
  960.  
  961.    for (i = 0; i < num_targets; i++) {
  962.       pipe_so_target_reference(&vec->so.states[i], targets[i]);
  963.       if (offset[i] == (unsigned)-1)
  964.          append_bitmask |= 1 << i;
  965.    }
  966.  
  967.    for (; i < vec->so.count; i++)
  968.       pipe_so_target_reference(&vec->so.states[i], NULL);
  969.  
  970.    vec->so.count = num_targets;
  971.    vec->so.append_bitmask = append_bitmask;
  972.  
  973.    vec->so.enabled = (vec->so.count > 0);
  974.  
  975.    vec->dirty |= ILO_DIRTY_SO;
  976. }
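
/*
 * Note on append_bitmask above: in the gallium set_stream_output_targets
 * interface of this era, an offset of (unsigned)-1 means writing should
 * continue where the previous use of the target left off (append), so
 * those targets are flagged in append_bitmask for later command emission.
 */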
  977.  
  978. static void
  979. ilo_stream_output_target_destroy(struct pipe_context *pipe,
  980.                                  struct pipe_stream_output_target *target)
  981. {
  982.    pipe_resource_reference(&target->buffer, NULL);
  983.    FREE(target);
  984. }
  985.  
  986. static struct pipe_sampler_view *
  987. ilo_create_sampler_view(struct pipe_context *pipe,
  988.                         struct pipe_resource *res,
  989.                         const struct pipe_sampler_view *templ)
  990. {
  991.    const struct ilo_dev *dev = ilo_context(pipe)->dev;
  992.    struct ilo_view_cso *view;
  993.  
  994.    view = MALLOC_STRUCT(ilo_view_cso);
  995.    assert(view);
  996.  
  997.    view->base = *templ;
  998.    pipe_reference_init(&view->base.reference, 1);
  999.    view->base.texture = NULL;
  1000.    pipe_resource_reference(&view->base.texture, res);
  1001.    view->base.context = pipe;
  1002.  
  1003.    if (res->target == PIPE_BUFFER) {
  1004.       const unsigned elem_size = util_format_get_blocksize(templ->format);
  1005.       const unsigned first_elem = templ->u.buf.first_element;
  1006.       const unsigned num_elems = templ->u.buf.last_element - first_elem + 1;
  1007.  
  1008.       ilo_gpe_init_view_surface_for_buffer(dev, ilo_buffer(res),
  1009.             first_elem * elem_size, num_elems * elem_size,
  1010.             elem_size, templ->format, false, false, &view->surface);
  1011.    }
  1012.    else {
  1013.       struct ilo_texture *tex = ilo_texture(res);
  1014.  
  1015.       /* warn about degraded performance because of a missing binding flag */
  1016.       if (tex->image.tiling == GEN6_TILING_NONE &&
  1017.           !(tex->base.bind & PIPE_BIND_SAMPLER_VIEW)) {
  1018.          ilo_warn("creating sampler view for a resource "
  1019.                   "not created for sampling\n");
  1020.       }
  1021.  
  1022.       ilo_gpe_init_view_surface_for_image(dev, &tex->image,
  1023.             tex->base.target, templ->format,
  1024.             templ->u.tex.first_level,
  1025.             templ->u.tex.last_level - templ->u.tex.first_level + 1,
  1026.             templ->u.tex.first_layer,
  1027.             templ->u.tex.last_layer - templ->u.tex.first_layer + 1,
  1028.             false, &view->surface);
  1029.    }
  1030.  
  1031.    return &view->base;
  1032. }
  1033.  
  1034. static void
  1035. ilo_sampler_view_destroy(struct pipe_context *pipe,
  1036.                          struct pipe_sampler_view *view)
  1037. {
  1038.    pipe_resource_reference(&view->texture, NULL);
  1039.    FREE(view);
  1040. }
  1041.  
  1042. static struct pipe_surface *
  1043. ilo_create_surface(struct pipe_context *pipe,
  1044.                    struct pipe_resource *res,
  1045.                    const struct pipe_surface *templ)
  1046. {
  1047.    const struct ilo_dev *dev = ilo_context(pipe)->dev;
  1048.    struct ilo_texture *tex = ilo_texture(res);
  1049.    struct ilo_surface_cso *surf;
  1050.  
  1051.    surf = MALLOC_STRUCT(ilo_surface_cso);
  1052.    assert(surf);
  1053.  
  1054.    surf->base = *templ;
  1055.    pipe_reference_init(&surf->base.reference, 1);
  1056.    surf->base.texture = NULL;
  1057.    pipe_resource_reference(&surf->base.texture, &tex->base);
  1058.  
  1059.    surf->base.context = pipe;
  1060.    surf->base.width = u_minify(tex->base.width0, templ->u.tex.level);
  1061.    surf->base.height = u_minify(tex->base.height0, templ->u.tex.level);
  1062.  
  1063.    surf->is_rt = !util_format_is_depth_or_stencil(templ->format);
  1064.  
  1065.    if (surf->is_rt) {
  1066.       /* relax this? */
  1067.       assert(tex->base.target != PIPE_BUFFER);
  1068.  
  1069.       /*
  1070.        * classic i965 sets render_cache_rw for constant buffers and sol
  1071.        * surfaces but not render buffers.  Why?
  1072.        */
  1073.       ilo_gpe_init_view_surface_for_image(dev,
  1074.             &tex->image, tex->base.target,
  1075.             templ->format, templ->u.tex.level, 1,
  1076.             templ->u.tex.first_layer,
  1077.             templ->u.tex.last_layer - templ->u.tex.first_layer + 1,
  1078.             true, &surf->u.rt);
  1079.    } else {
  1080.       assert(res->target != PIPE_BUFFER);
  1081.  
  1082.       ilo_gpe_init_zs_surface(dev, &tex->image,
  1083.             (tex->separate_s8) ? &tex->separate_s8->image : NULL,
  1084.             tex->base.target, templ->format,
  1085.             templ->u.tex.level, templ->u.tex.first_layer,
  1086.             templ->u.tex.last_layer - templ->u.tex.first_layer + 1,
  1087.             &surf->u.zs);
  1088.    }
  1089.  
  1090.    return &surf->base;
  1091. }
  1092.  
  1093. static void
  1094. ilo_surface_destroy(struct pipe_context *pipe,
  1095.                     struct pipe_surface *surface)
  1096. {
  1097.    pipe_resource_reference(&surface->texture, NULL);
  1098.    FREE(surface);
  1099. }
  1100.  
  1101. static void *
  1102. ilo_create_compute_state(struct pipe_context *pipe,
  1103.                          const struct pipe_compute_state *state)
  1104. {
  1105.    struct ilo_context *ilo = ilo_context(pipe);
  1106.    struct ilo_shader_state *shader;
  1107.  
  1108.    shader = ilo_shader_create_cs(ilo->dev, state, &ilo->state_vector);
  1109.    assert(shader);
  1110.  
  1111.    ilo_shader_cache_add(ilo->shader_cache, shader);
  1112.  
  1113.    return shader;
  1114. }
  1115.  
  1116. static void
  1117. ilo_bind_compute_state(struct pipe_context *pipe, void *state)
  1118. {
  1119.    struct ilo_state_vector *vec = &ilo_context(pipe)->state_vector;
  1120.  
  1121.    vec->cs = state;
  1122.  
  1123.    vec->dirty |= ILO_DIRTY_CS;
  1124. }
  1125.  
  1126. static void
  1127. ilo_delete_compute_state(struct pipe_context *pipe, void *state)
  1128. {
  1129.    struct ilo_context *ilo = ilo_context(pipe);
  1130.    struct ilo_shader_state *cs = (struct ilo_shader_state *) state;
  1131.  
  1132.    ilo_shader_cache_remove(ilo->shader_cache, cs);
  1133.    ilo_shader_destroy(cs);
  1134. }
  1135.  
  1136. static void
  1137. ilo_set_compute_resources(struct pipe_context *pipe,
  1138.                           unsigned start, unsigned count,
  1139.                           struct pipe_surface **surfaces)
  1140. {
  1141.    struct ilo_state_vector *vec = &ilo_context(pipe)->state_vector;
  1142.    struct ilo_resource_state *dst = &vec->cs_resource;
  1143.    unsigned i;
  1144.  
  1145.    assert(start + count <= Elements(dst->states));
  1146.  
  1147.    if (surfaces) {
  1148.       for (i = 0; i < count; i++)
  1149.          pipe_surface_reference(&dst->states[start + i], surfaces[i]);
  1150.    }
  1151.    else {
  1152.       for (i = 0; i < count; i++)
  1153.          pipe_surface_reference(&dst->states[start + i], NULL);
  1154.    }
  1155.  
  1156.    if (dst->count <= start + count) {
  1157.       if (surfaces)
  1158.          count += start;
  1159.       else
  1160.          count = start;
  1161.  
  1162.       while (count > 0 && !dst->states[count - 1])
  1163.          count--;
  1164.  
  1165.       dst->count = count;
  1166.    }
  1167.  
  1168.    vec->dirty |= ILO_DIRTY_CS_RESOURCE;
  1169. }
  1170.  
  1171. static void
  1172. ilo_set_global_binding(struct pipe_context *pipe,
  1173.                        unsigned start, unsigned count,
  1174.                        struct pipe_resource **resources,
  1175.                        uint32_t **handles)
  1176. {
  1177.    struct ilo_state_vector *vec = &ilo_context(pipe)->state_vector;
  1178.    struct ilo_global_binding_cso *dst;
  1179.    unsigned i;
  1180.  
  1181.    /* make room */
  1182.    if (vec->global_binding.count < start + count) {
  1183.       if (resources) {
  1184.          const unsigned old_size = vec->global_binding.bindings.size;
  1185.          const unsigned new_size = sizeof(*dst) * (start + count);
  1186.  
  1187.          if (old_size < new_size) {
  1188.             util_dynarray_resize(&vec->global_binding.bindings, new_size);
  1189.             memset(vec->global_binding.bindings.data + old_size, 0,
  1190.                   new_size - old_size);
  1191.          }
  1192.       } else {
  1193.          count = vec->global_binding.count - start;
  1194.       }
  1195.    }
  1196.  
  1197.    dst = util_dynarray_element(&vec->global_binding.bindings,
  1198.          struct ilo_global_binding_cso, start);
  1199.  
  1200.    if (resources) {
  1201.       for (i = 0; i < count; i++) {
  1202.          pipe_resource_reference(&dst[i].resource, resources[i]);
  1203.          dst[i].handle = handles[i];
  1204.       }
  1205.    } else {
  1206.       for (i = 0; i < count; i++) {
  1207.          pipe_resource_reference(&dst[i].resource, NULL);
  1208.          dst[i].handle = NULL;
  1209.       }
  1210.    }
  1211.  
  1212.    if (vec->global_binding.count <= start + count) {
  1213.       dst = util_dynarray_begin(&vec->global_binding.bindings);
  1214.  
  1215.       if (resources)
  1216.          count += start;
  1217.       else
  1218.          count = start;
  1219.  
  1220.       while (count > 0 && !dst[count - 1].resource)
  1221.          count--;
  1222.  
  1223.       vec->global_binding.count = count;
  1224.    }
  1225.  
  1226.    vec->dirty |= ILO_DIRTY_GLOBAL_BINDING;
  1227. }
  1228.  
  1229. /**
  1230.  * Initialize state-related functions.
  1231.  */
  1232. void
  1233. ilo_init_state_functions(struct ilo_context *ilo)
  1234. {
  1235.    STATIC_ASSERT(ILO_STATE_COUNT <= 32);
  1236.  
  1237.    ilo->base.create_blend_state = ilo_create_blend_state;
  1238.    ilo->base.bind_blend_state = ilo_bind_blend_state;
  1239.    ilo->base.delete_blend_state = ilo_delete_blend_state;
  1240.    ilo->base.create_sampler_state = ilo_create_sampler_state;
  1241.    ilo->base.bind_sampler_states = ilo_bind_sampler_states;
  1242.    ilo->base.delete_sampler_state = ilo_delete_sampler_state;
  1243.    ilo->base.create_rasterizer_state = ilo_create_rasterizer_state;
  1244.    ilo->base.bind_rasterizer_state = ilo_bind_rasterizer_state;
  1245.    ilo->base.delete_rasterizer_state = ilo_delete_rasterizer_state;
  1246.    ilo->base.create_depth_stencil_alpha_state = ilo_create_depth_stencil_alpha_state;
  1247.    ilo->base.bind_depth_stencil_alpha_state = ilo_bind_depth_stencil_alpha_state;
  1248.    ilo->base.delete_depth_stencil_alpha_state = ilo_delete_depth_stencil_alpha_state;
  1249.    ilo->base.create_fs_state = ilo_create_fs_state;
  1250.    ilo->base.bind_fs_state = ilo_bind_fs_state;
  1251.    ilo->base.delete_fs_state = ilo_delete_fs_state;
  1252.    ilo->base.create_vs_state = ilo_create_vs_state;
  1253.    ilo->base.bind_vs_state = ilo_bind_vs_state;
  1254.    ilo->base.delete_vs_state = ilo_delete_vs_state;
  1255.    ilo->base.create_gs_state = ilo_create_gs_state;
  1256.    ilo->base.bind_gs_state = ilo_bind_gs_state;
  1257.    ilo->base.delete_gs_state = ilo_delete_gs_state;
  1258.    ilo->base.create_vertex_elements_state = ilo_create_vertex_elements_state;
  1259.    ilo->base.bind_vertex_elements_state = ilo_bind_vertex_elements_state;
  1260.    ilo->base.delete_vertex_elements_state = ilo_delete_vertex_elements_state;
  1261.  
  1262.    ilo->base.set_blend_color = ilo_set_blend_color;
  1263.    ilo->base.set_stencil_ref = ilo_set_stencil_ref;
  1264.    ilo->base.set_sample_mask = ilo_set_sample_mask;
  1265.    ilo->base.set_clip_state = ilo_set_clip_state;
  1266.    ilo->base.set_constant_buffer = ilo_set_constant_buffer;
  1267.    ilo->base.set_framebuffer_state = ilo_set_framebuffer_state;
  1268.    ilo->base.set_polygon_stipple = ilo_set_polygon_stipple;
  1269.    ilo->base.set_scissor_states = ilo_set_scissor_states;
  1270.    ilo->base.set_viewport_states = ilo_set_viewport_states;
  1271.    ilo->base.set_sampler_views = ilo_set_sampler_views;
  1272.    ilo->base.set_shader_resources = ilo_set_shader_resources;
  1273.    ilo->base.set_vertex_buffers = ilo_set_vertex_buffers;
  1274.    ilo->base.set_index_buffer = ilo_set_index_buffer;
  1275.  
  1276.    ilo->base.create_stream_output_target = ilo_create_stream_output_target;
  1277.    ilo->base.stream_output_target_destroy = ilo_stream_output_target_destroy;
  1278.    ilo->base.set_stream_output_targets = ilo_set_stream_output_targets;
  1279.  
  1280.    ilo->base.create_sampler_view = ilo_create_sampler_view;
  1281.    ilo->base.sampler_view_destroy = ilo_sampler_view_destroy;
  1282.  
  1283.    ilo->base.create_surface = ilo_create_surface;
  1284.    ilo->base.surface_destroy = ilo_surface_destroy;
  1285.  
  1286.    ilo->base.create_compute_state = ilo_create_compute_state;
  1287.    ilo->base.bind_compute_state = ilo_bind_compute_state;
  1288.    ilo->base.delete_compute_state = ilo_delete_compute_state;
  1289.    ilo->base.set_compute_resources = ilo_set_compute_resources;
  1290.    ilo->base.set_global_binding = ilo_set_global_binding;
  1291. }
  1292.  
  1293. void
  1294. ilo_state_vector_init(const struct ilo_dev *dev,
  1295.                       struct ilo_state_vector *vec)
  1296. {
  1297.    ilo_gpe_set_scissor_null(dev, &vec->scissor);
  1298.  
  1299.    ilo_gpe_init_zs_surface(dev, NULL, NULL, PIPE_TEXTURE_2D,
  1300.          PIPE_FORMAT_NONE, 0, 0, 1, &vec->fb.null_zs);
  1301.  
  1302.    util_dynarray_init(&vec->global_binding.bindings);
  1303.  
  1304.    vec->dirty = ILO_DIRTY_ALL;
  1305. }
  1306.  
  1307. void
  1308. ilo_state_vector_cleanup(struct ilo_state_vector *vec)
  1309. {
  1310.    unsigned i, sh;
  1311.  
  1312.    for (i = 0; i < Elements(vec->vb.states); i++) {
  1313.       if (vec->vb.enabled_mask & (1 << i))
  1314.          pipe_resource_reference(&vec->vb.states[i].buffer, NULL);
  1315.    }
  1316.  
  1317.    pipe_resource_reference(&vec->ib.buffer, NULL);
  1318.    pipe_resource_reference(&vec->ib.hw_resource, NULL);
  1319.  
  1320.    for (i = 0; i < vec->so.count; i++)
  1321.       pipe_so_target_reference(&vec->so.states[i], NULL);
  1322.  
  1323.    for (sh = 0; sh < PIPE_SHADER_TYPES; sh++) {
  1324.       for (i = 0; i < vec->view[sh].count; i++) {
  1325.          struct pipe_sampler_view *view = vec->view[sh].states[i];
  1326.          pipe_sampler_view_reference(&view, NULL);
  1327.       }
  1328.  
  1329.       for (i = 0; i < Elements(vec->cbuf[sh].cso); i++) {
  1330.          struct ilo_cbuf_cso *cbuf = &vec->cbuf[sh].cso[i];
  1331.          pipe_resource_reference(&cbuf->resource, NULL);
  1332.       }
  1333.    }
  1334.  
  1335.    for (i = 0; i < vec->resource.count; i++)
  1336.       pipe_surface_reference(&vec->resource.states[i], NULL);
  1337.  
  1338.    for (i = 0; i < vec->fb.state.nr_cbufs; i++)
  1339.       pipe_surface_reference(&vec->fb.state.cbufs[i], NULL);
  1340.  
  1341.    if (vec->fb.state.zsbuf)
  1342.       pipe_surface_reference(&vec->fb.state.zsbuf, NULL);
  1343.  
  1344.    for (i = 0; i < vec->cs_resource.count; i++)
  1345.       pipe_surface_reference(&vec->cs_resource.states[i], NULL);
  1346.  
  1347.    for (i = 0; i < vec->global_binding.count; i++) {
  1348.       struct ilo_global_binding_cso *cso =
  1349.          util_dynarray_element(&vec->global_binding.bindings,
  1350.                struct ilo_global_binding_cso, i);
  1351.       pipe_resource_reference(&cso->resource, NULL);
  1352.    }
  1353.  
  1354.    util_dynarray_fini(&vec->global_binding.bindings);
  1355. }
  1356.  
  1357. /**
  1358.  * Mark dirty all states that reference the resource.
  1359.  */
  1360. void
  1361. ilo_state_vector_resource_renamed(struct ilo_state_vector *vec,
  1362.                                   struct pipe_resource *res)
  1363. {
  1364.    struct intel_bo *bo = ilo_resource_get_bo(res);
  1365.    uint32_t states = 0;
  1366.    unsigned sh, i;
  1367.  
  1368.    if (res->target == PIPE_BUFFER) {
  1369.       uint32_t vb_mask = vec->vb.enabled_mask;
  1370.  
  1371.       while (vb_mask) {
  1372.          const unsigned idx = u_bit_scan(&vb_mask);
  1373.  
  1374.          if (vec->vb.states[idx].buffer == res) {
  1375.             states |= ILO_DIRTY_VB;
  1376.             break;
  1377.          }
  1378.       }
  1379.  
  1380.       if (vec->ib.buffer == res) {
  1381.          states |= ILO_DIRTY_IB;
  1382.  
  1383.          /*
  1384.           * finalize_index_buffer() has an optimization that clears
  1385.           * ILO_DIRTY_IB when the HW states do not change.  However, it fails
  1386.           * to flush the VF cache when the HW states do not change, but the
  1387.           * contents of the IB have changed.  Here, we set the index size to an
  1388.           * invalid value to avoid the optimization.
  1389.           */
  1390.          vec->ib.hw_index_size = 0;
  1391.       }
  1392.  
  1393.       for (i = 0; i < vec->so.count; i++) {
  1394.          if (vec->so.states[i]->buffer == res) {
  1395.             states |= ILO_DIRTY_SO;
  1396.             break;
  1397.          }
  1398.       }
  1399.    }
  1400.  
  1401.    for (sh = 0; sh < PIPE_SHADER_TYPES; sh++) {
  1402.       for (i = 0; i < vec->view[sh].count; i++) {
  1403.          struct ilo_view_cso *cso = (struct ilo_view_cso *) vec->view[sh].states[i];
  1404.  
  1405.          if (cso->base.texture == res) {
  1406.             static const unsigned view_dirty_bits[PIPE_SHADER_TYPES] = {
  1407.                [PIPE_SHADER_VERTEX]    = ILO_DIRTY_VIEW_VS,
  1408.                [PIPE_SHADER_FRAGMENT]  = ILO_DIRTY_VIEW_FS,
  1409.                [PIPE_SHADER_GEOMETRY]  = ILO_DIRTY_VIEW_GS,
  1410.                [PIPE_SHADER_COMPUTE]   = ILO_DIRTY_VIEW_CS,
  1411.             };
  1412.             cso->surface.bo = bo;
  1413.  
  1414.             states |= view_dirty_bits[sh];
  1415.             break;
  1416.          }
  1417.       }
  1418.  
  1419.       if (res->target == PIPE_BUFFER) {
  1420.          for (i = 0; i < Elements(vec->cbuf[sh].cso); i++) {
  1421.             struct ilo_cbuf_cso *cbuf = &vec->cbuf[sh].cso[i];
  1422.  
  1423.             if (cbuf->resource == res) {
  1424.                cbuf->surface.bo = bo;
  1425.                states |= ILO_DIRTY_CBUF;
  1426.                break;
  1427.             }
  1428.          }
  1429.       }
  1430.    }
  1431.  
  1432.    for (i = 0; i < vec->resource.count; i++) {
  1433.       struct ilo_surface_cso *cso =
  1434.          (struct ilo_surface_cso *) vec->resource.states[i];
  1435.  
  1436.       if (cso->base.texture == res) {
  1437.          cso->u.rt.bo = bo;
  1438.          states |= ILO_DIRTY_RESOURCE;
  1439.          break;
  1440.       }
  1441.    }
  1442.  
  1443.    /* for now? */
  1444.    if (res->target != PIPE_BUFFER) {
  1445.       for (i = 0; i < vec->fb.state.nr_cbufs; i++) {
  1446.          struct ilo_surface_cso *cso =
  1447.             (struct ilo_surface_cso *) vec->fb.state.cbufs[i];
  1448.          if (cso && cso->base.texture == res) {
  1449.             cso->u.rt.bo = bo;
  1450.             states |= ILO_DIRTY_FB;
  1451.             break;
  1452.          }
  1453.       }
  1454.  
  1455.       if (vec->fb.state.zsbuf && vec->fb.state.zsbuf->texture == res) {
  1456.          struct ilo_surface_cso *cso =
  1457.             (struct ilo_surface_cso *) vec->fb.state.zsbuf;
  1458.  
  1459.          cso->u.rt.bo = bo;
  1460.          states |= ILO_DIRTY_FB;
  1461.       }
  1462.    }
  1463.  
  1464.    for (i = 0; i < vec->cs_resource.count; i++) {
  1465.       struct ilo_surface_cso *cso =
  1466.          (struct ilo_surface_cso *) vec->cs_resource.states[i];
  1467.       if (cso->base.texture == res) {
  1468.          cso->u.rt.bo = bo;
  1469.          states |= ILO_DIRTY_CS_RESOURCE;
  1470.          break;
  1471.       }
  1472.    }
  1473.  
  1474.    for (i = 0; i < vec->global_binding.count; i++) {
  1475.       struct ilo_global_binding_cso *cso =
  1476.          util_dynarray_element(&vec->global_binding.bindings,
  1477.                struct ilo_global_binding_cso, i);
  1478.  
  1479.       if (cso->resource == res) {
  1480.          states |= ILO_DIRTY_GLOBAL_BINDING;
  1481.          break;
  1482.       }
  1483.    }
  1484.  
  1485.    vec->dirty |= states;
  1486. }
  1487.  
  1488. void
  1489. ilo_state_vector_dump_dirty(const struct ilo_state_vector *vec)
  1490. {
  1491.    static const char *state_names[ILO_STATE_COUNT] = {
  1492.       [ILO_STATE_VB]              = "VB",
  1493.       [ILO_STATE_VE]              = "VE",
  1494.       [ILO_STATE_IB]              = "IB",
  1495.       [ILO_STATE_VS]              = "VS",
  1496.       [ILO_STATE_GS]              = "GS",
  1497.       [ILO_STATE_SO]              = "SO",
  1498.       [ILO_STATE_CLIP]            = "CLIP",
  1499.       [ILO_STATE_VIEWPORT]        = "VIEWPORT",
  1500.       [ILO_STATE_SCISSOR]         = "SCISSOR",
  1501.       [ILO_STATE_RASTERIZER]      = "RASTERIZER",
  1502.       [ILO_STATE_POLY_STIPPLE]    = "POLY_STIPPLE",
  1503.       [ILO_STATE_SAMPLE_MASK]     = "SAMPLE_MASK",
  1504.       [ILO_STATE_FS]              = "FS",
  1505.       [ILO_STATE_DSA]             = "DSA",
  1506.       [ILO_STATE_STENCIL_REF]     = "STENCIL_REF",
  1507.       [ILO_STATE_BLEND]           = "BLEND",
  1508.       [ILO_STATE_BLEND_COLOR]     = "BLEND_COLOR",
  1509.       [ILO_STATE_FB]              = "FB",
  1510.       [ILO_STATE_SAMPLER_VS]      = "SAMPLER_VS",
  1511.       [ILO_STATE_SAMPLER_GS]      = "SAMPLER_GS",
  1512.       [ILO_STATE_SAMPLER_FS]      = "SAMPLER_FS",
  1513.       [ILO_STATE_SAMPLER_CS]      = "SAMPLER_CS",
  1514.       [ILO_STATE_VIEW_VS]         = "VIEW_VS",
  1515.       [ILO_STATE_VIEW_GS]         = "VIEW_GS",
  1516.       [ILO_STATE_VIEW_FS]         = "VIEW_FS",
  1517.       [ILO_STATE_VIEW_CS]         = "VIEW_CS",
  1518.       [ILO_STATE_CBUF]            = "CBUF",
  1519.       [ILO_STATE_RESOURCE]        = "RESOURCE",
  1520.       [ILO_STATE_CS]              = "CS",
  1521.       [ILO_STATE_CS_RESOURCE]     = "CS_RESOURCE",
  1522.       [ILO_STATE_GLOBAL_BINDING]  = "GLOBAL_BINDING",
  1523.    };
  1524.    uint32_t dirty = vec->dirty;
  1525.  
  1526.    if (!dirty) {
  1527.       ilo_printf("no state is dirty\n");
  1528.       return;
  1529.    }
  1530.  
  1531.    dirty &= (1U << ILO_STATE_COUNT) - 1;
  1532.  
  1533.    ilo_printf("%2d states are dirty:", util_bitcount(dirty));
  1534.    while (dirty) {
  1535.       const enum ilo_state state = u_bit_scan(&dirty);
  1536.       ilo_printf(" %s", state_names[state]);
  1537.    }
  1538.    ilo_printf("\n");
  1539. }
  1540.