/*
 * Copyright © 2023 Imagination Technologies Ltd.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */

#include "rogue.h"
#include "rogue_builder.h"
#include "util/macros.h"

#include <stdbool.h>

/**
 * \file rogue_lower_late_ops.c
 *
 * \brief Contains the rogue_lower_late_ops pass.
 */

/* TODO NEXT!: Check if registers are being written to that require special
 * behaviour, like vertex out.
 */
/* TODO NEXT!: Make sure that SSA regs aren't being used; late passes must
 * happen after SSA.
 */
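/* Lowers a conditional move into a pair of MBYP (move) instructions
 * predicated on P0: the first writes src[1] to the destination when P0 is
 * true, the second writes src[2] when P0 is false. Both moves inherit the
 * original CMOV's comment, then the CMOV itself is deleted.
 */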
static inline bool rogue_lower_CMOV(rogue_builder *b, rogue_alu_instr *cmov)
{
   rogue_instr *instr_true =
      &rogue_MBYP(b, cmov->dst[0].ref, cmov->src[1].ref)->instr;
   rogue_instr *instr_false =
      &rogue_MBYP(b, cmov->dst[0].ref, cmov->src[2].ref)->instr;

   rogue_set_instr_exec_cond(instr_true, ROGUE_EXEC_COND_P0_TRUE);
   rogue_set_instr_exec_cond(instr_false, ROGUE_EXEC_COND_P0_FALSE);

   rogue_merge_instr_comment(instr_true, &cmov->instr, "cmov (true)");
   rogue_merge_instr_comment(instr_false, &cmov->instr, "cmov (false)");

   rogue_instr_delete(&cmov->instr);

   return true;
}

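/* Dispatches ALU instructions to their lowering functions; returns true if
 * the instruction was lowered, false if no lowering was needed.
 */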
static inline bool rogue_lower_alu_instr(rogue_builder *b, rogue_alu_instr *alu)
{
   switch (alu->op) {
   case ROGUE_ALU_OP_CMOV:
      return rogue_lower_CMOV(b, alu);

   default:
      break;
   }

   return false;
}

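/**
 * \brief Lowers backend pseudo-instructions (those without supported
 * hardware execution phases) into sequences of real instructions.
 *
 * \param[in] shader The shader to lower.
 * \return True if the pass made progress.
 */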
PUBLIC
bool rogue_lower_late_ops(rogue_shader *shader)
{
   if (shader->is_grouped)
      return false;

   bool progress = false;

   rogue_builder b;
   rogue_builder_init(&b, shader);

   rogue_foreach_instr_in_shader_safe (instr, shader) {
      /* Skip real ops, i.e. instructions that already have supported
       * hardware execution phases. */
      if (rogue_instr_supported_phases(instr))
         continue;

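      /* Position the cursor so that lowered instructions are inserted in
       * place of the original. */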
      b.cursor = rogue_cursor_before_instr(instr);
      switch (instr->type) {
      case ROGUE_INSTR_TYPE_ALU:
         progress |= rogue_lower_alu_instr(&b, rogue_instr_as_alu(instr));
         break;

      default:
         continue;
      }
   }

   return progress;
}