path: root/src/compiler/nir/nir.h
author    Jason Ekstrand <[email protected]>  2018-08-31 07:35:17 -0500
committer Jason Ekstrand <[email protected]>  2018-09-06 16:07:50 -0500
commit    44ec31cd75f679b32afb03c3c9c46762e57ce506 (patch)
tree      c9ff90ef7289fb26513724d0663fcdb91193f34a /src/compiler/nir/nir.h
parent    0909a57b631f2b200a7422907df6302a72930252 (diff)
nir: Drop the vs_inputs_dual_locations option
It was very inconsistently handled; the only things that made use of it were glsl_to_nir, glspirv, and nir_gather_info. In particular, nir_lower_io completely ignored it, so anyone using nir_lower_io on 64-bit vertex attributes was going to be in for a shock. Also, as of the previous commit, it's set by every driver that supports 64-bit vertex attributes. There's no longer any reason to have it be an option so let's just delete it.

Reviewed-by: Alejandro Piñeiro <[email protected]>
Reviewed-by: Timothy Arceri <[email protected]>
Diffstat (limited to 'src/compiler/nir/nir.h')
-rw-r--r--  src/compiler/nir/nir.h | 9
1 file changed, 1 insertion(+), 8 deletions(-)
diff --git a/src/compiler/nir/nir.h b/src/compiler/nir/nir.h
index b9393702097..bf4bd916d27 100644
--- a/src/compiler/nir/nir.h
+++ b/src/compiler/nir/nir.h
@@ -2119,12 +2119,6 @@ typedef struct nir_shader_compiler_options {
     */
    bool use_interpolated_input_intrinsics;
 
-   /**
-    * Do vertex shader double inputs use two locations?  The Vulkan spec
-    * requires two locations to be used, OpenGL allows a single location.
-    */
-   bool vs_inputs_dual_locations;
-
    unsigned max_unroll_iterations;
 } nir_shader_compiler_options;
 
@@ -3039,9 +3033,8 @@ bool nir_opt_conditional_discard(nir_shader *shader);
 
 void nir_sweep(nir_shader *shader);
 
-uint64_t nir_get_dual_slot_attributes(nir_shader *shader);
 void nir_remap_dual_slot_attributes(nir_shader *shader,
-                                    uint64_t dual_slot);
+                                    uint64_t *dual_slot_inputs);
 uint64_t nir_get_single_slot_attribs_mask(uint64_t attribs, uint64_t dual_slot);
 
 nir_intrinsic_op nir_intrinsic_from_system_value(gl_system_value val);
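
For illustration, a minimal sketch of how a driver might call the reworked helper after this change. Only the two prototypes shown in the hunk above come from nir.h; the surrounding function, the variable names, and the use of nir->info.inputs_read as the mask being translated are assumptions for the example, not part of this patch.

#include "nir.h"

/* Hypothetical driver-side helper; not part of this commit. */
static void
example_handle_64bit_vs_inputs(nir_shader *nir)
{
   /* With the option gone, nir_remap_dual_slot_attributes() is called
    * unconditionally for vertex shaders and reports which input
    * locations hold 64-bit (two-slot) attributes via an out-parameter. */
   uint64_t dual_slot_inputs = 0;
   nir_remap_dual_slot_attributes(nir, &dual_slot_inputs);

   /* Assumed usage: translate a bitmask expressed in the remapped,
    * two-slot location space back to the original single-slot
    * numbering using the mask gathered above. */
   uint64_t single_slot_inputs =
      nir_get_single_slot_attribs_mask(nir->info.inputs_read,
                                       dual_slot_inputs);
   (void)single_slot_inputs;
}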