author     Jason Ekstrand <[email protected]>    2018-10-19 12:06:36 -0500
committer  Jason Ekstrand <[email protected]>    2018-10-26 11:45:29 -0500
commit     5cdeefe057d9da0a48630770dc8f01e0f5295fa2 (patch)
tree       57aa683f27bb4e290bbc20f3e9906c27a0a0e158
parent     18fb2c5d92b1fe3793f516cc7d45f058b61e57b5 (diff)
intel/nir: Use the OPT macro for more passes
Reviewed-by: Ian Romanick <[email protected]>
 src/intel/compiler/brw_nir.c | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)
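For context, OPT() is a convenience macro defined earlier in brw_nir.c: it runs a pass through NIR_PASS so that the pass's progress is recorded (and, in debug builds, the shader is re-validated after the pass). The exact definition lives in the file; a minimal sketch of the idiom, assuming a `bool progress` variable in the enclosing scope, looks like this:

/* Sketch of the OPT() idiom from brw_nir.c (an illustration, not the
 * verbatim definition): run a NIR pass via NIR_PASS, fold the pass's
 * result into the caller's `progress` flag, and yield whether this
 * particular pass changed anything.
 */
#define OPT(pass, ...) ({                              \
   bool this_progress = false;                         \
   NIR_PASS(this_progress, nir, pass, ##__VA_ARGS__);  \
   if (this_progress)                                  \
      progress = true;                                 \
   this_progress;                                      \
})

With that in place, wrapping nir_lower_int64, nir_lower_bit_size, and nir_lower_indirect_derefs in OPT gives these lowering passes the same progress reporting and debug-time validation as the passes that already use the macro.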
diff --git a/src/intel/compiler/brw_nir.c b/src/intel/compiler/brw_nir.c
index 1cd56861578..cf5a4a96d67 100644
--- a/src/intel/compiler/brw_nir.c
+++ b/src/intel/compiler/brw_nir.c
@@ -674,7 +674,7 @@ brw_preprocess_nir(const struct brw_compiler *compiler, nir_shader *nir)
    /* Lower int64 instructions before nir_optimize so that loop unrolling
     * sees their actual cost.
     */
-   nir_lower_int64(nir, nir_lower_imul64 |
+   OPT(nir_lower_int64, nir_lower_imul64 |
                         nir_lower_isign64 |
                         nir_lower_divmod64);
 
@@ -687,7 +687,7 @@ brw_preprocess_nir(const struct brw_compiler *compiler, nir_shader *nir)
       OPT(nir_opt_large_constants, NULL, 32);
    }
 
-   nir_lower_bit_size(nir, lower_bit_size_callback, NULL);
+   OPT(nir_lower_bit_size, lower_bit_size_callback, NULL);
 
    if (is_scalar) {
       OPT(nir_lower_load_const_to_scalar);
@@ -712,7 +712,7 @@ brw_preprocess_nir(const struct brw_compiler *compiler, nir_shader *nir)
    nir_variable_mode indirect_mask =
       brw_nir_no_indirect_mask(compiler, nir->info.stage);
 
-   nir_lower_indirect_derefs(nir, indirect_mask);
+   OPT(nir_lower_indirect_derefs, indirect_mask);
 
    /* Get rid of split copies */
    nir = brw_nir_optimize(nir, compiler, is_scalar, false);
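The progress flag that OPT() accumulates is what lets a pass loop run to a fixed point, which is the pattern brw_nir_optimize (called above) follows. A condensed sketch of such a loop, reusing the OPT() sketch above, with the real pass list heavily abbreviated and the function name purely illustrative:

/* Condensed sketch of a progress-driven optimization loop in the style
 * of brw_nir_optimize(): re-run the passes until none reports progress.
 * The pass list here is illustrative; the real function runs many more.
 */
static nir_shader *
optimize_to_fixpoint(nir_shader *nir)
{
   bool progress;
   do {
      progress = false;
      OPT(nir_opt_dce);       /* dead-code elimination */
      OPT(nir_opt_cse);       /* common-subexpression elimination */
      OPT(nir_opt_algebraic); /* algebraic simplifications */
   } while (progress);
   return nir;
}

This is also why converting the three lowering passes to OPT matters beyond consistency: a pass that bypasses the macro makes changes that the surrounding progress bookkeeping (and debug-build validation) never sees.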