summary refs log tree commit diff stats
path: root/src
diff options
context:
space:
mode:
Diffstat (limited to 'src')
-rw-r--r--src/gallium/drivers/svga/svga_state_need_swtnl.c24
1 file changed, 23 insertions(+), 1 deletion(-)
diff --git a/src/gallium/drivers/svga/svga_state_need_swtnl.c b/src/gallium/drivers/svga/svga_state_need_swtnl.c
index 5693c1ac9b2..8c39a4b76f0 100644
--- a/src/gallium/drivers/svga/svga_state_need_swtnl.c
+++ b/src/gallium/drivers/svga/svga_state_need_swtnl.c
@@ -141,6 +141,27 @@ update_need_pipeline( struct svga_context *svga,
need_pipeline = TRUE;
}
+ /* SVGA_NEW_FS, SVGA_NEW_RAST, SVGA_NEW_REDUCED_PRIMITIVE
+ */
+ if (svga->curr.reduced_prim == PIPE_PRIM_POINTS) {
+ unsigned sprite_coord_gen = svga->curr.rast->templ.sprite_coord_enable;
+ unsigned generic_inputs = svga->curr.fs->generic_inputs;
+
+ if (sprite_coord_gen &&
+ (generic_inputs & ~sprite_coord_gen)) {
+ /* The fragment shader is using some generic inputs that are
+ * not being replaced by auto-generated point/sprite coords (and
+ * auto sprite coord generation is turned on).
+ * The SVGA3D interface does not support that: if we enable
+ * SVGA3D_RS_POINTSPRITEENABLE it gets enabled for _all_
+ * texture coordinate sets.
+ * To solve this, we have to use the draw-module's wide/sprite
+ * point stage.
+ */
+ need_pipeline = TRUE;
+ }
+ }
+
if (need_pipeline != svga->state.sw.need_pipeline) {
svga->state.sw.need_pipeline = need_pipeline;
svga->dirty |= SVGA_NEW_NEED_PIPELINE;
@@ -158,6 +179,7 @@ struct svga_tracked_state svga_update_need_pipeline =
{
"need pipeline",
(SVGA_NEW_RAST |
+ SVGA_NEW_FS |
SVGA_NEW_VS |
SVGA_NEW_REDUCED_PRIMITIVE),
update_need_pipeline
@@ -186,7 +208,7 @@ update_need_swtnl( struct svga_context *svga,
}
/*
- * Some state changes the draw module does makes us belive we
+ * Some state changes the draw module does makes us believe we
* we don't need swtnl. This causes the vdecl code to pickup
* the wrong buffers and vertex formats. Try trivial/line-wide.
*/