// Copyright © 2022 Collabora, Ltd.
// SPDX-License-Identifier: MIT

use crate::bindings::*;

use std::ffi::{c_void, CStr};
use std::marker::PhantomData;
use std::ptr::NonNull;
use std::str;

// from https://internals.rust-lang.org/t/discussion-on-offset-of/7440/2
macro_rules! offset_of {
    ($Struct:path, $field:ident) => {{
        // Using a separate function to minimize unhygienic hazards
        // (e.g. unsafety of #[repr(packed)] field borrows).
        // Uncomment `const` when `const fn`s can juggle pointers.

        // const
        fn offset() -> usize {
            let u = std::mem::MaybeUninit::<$Struct>::uninit();
            // Use pattern-matching to avoid accidentally going through Deref.
            let &$Struct { $field: ref f, .. } = unsafe { &*u.as_ptr() };
            let o =
                (f as *const _ as usize).wrapping_sub(&u as *const _ as usize);
            // Triple check that we are within `u` still.
            assert!((0..=std::mem::size_of_val(&u)).contains(&o));
            o
        }
        offset()
    }};
}

// Walks a C exec_list whose nodes are embedded in a struct of type T at byte
// offset `offset`, recovering the containing struct container_of-style.
pub struct ExecListIter<'a, T> {
    n: &'a exec_node,
    offset: usize,
    rev: bool,
    _marker: PhantomData<T>,
}

impl<'a, T> ExecListIter<'a, T> {
    fn new(l: &'a exec_list, offset: usize) -> Self {
        Self {
            n: &l.head_sentinel,
            offset,
            rev: false,
            _marker: PhantomData,
        }
    }

    #[allow(dead_code)]
    fn new_rev(l: &'a exec_list, offset: usize) -> Self {
        Self {
            n: &l.tail_sentinel,
            offset,
            rev: true,
            _marker: PhantomData,
        }
    }

    fn at(n: &'a exec_node, offset: usize, rev: bool) -> Self {
        Self {
            n,
            offset,
            rev,
            _marker: PhantomData,
        }
    }
}

impl<'a, T: 'a> Iterator for ExecListIter<'a, T> {
    type Item = &'a T;

    fn next(&mut self) -> Option<Self::Item> {
        if self.rev {
            self.n = unsafe { &*self.n.prev };
            // Only the head sentinel has a null prev pointer, so hitting it
            // means we have walked off the front of the list.
            if self.n.prev.is_null() {
                None
            } else {
                let t: *const c_void = (self.n as *const exec_node).cast();
                Some(unsafe { &*(t.sub(self.offset).cast()) })
            }
        } else {
            self.n = unsafe { &*self.n.next };
            // Only the tail sentinel has a null next pointer, so hitting it
            // means we have walked off the back of the list.
            if self.n.next.is_null() {
                None
            } else {
                let t: *const c_void = (self.n as *const exec_node).cast();
                Some(unsafe { &*(t.sub(self.offset).cast()) })
            }
        }
    }
}

impl nir_def {
    pub fn parent_instr<'a>(&'a self) -> &'a nir_instr {
        unsafe { NonNull::new(self.parent_instr).unwrap().as_ref() }
    }

    pub fn components_read(&self) -> nir_component_mask_t {
        unsafe { nir_def_components_read(self as *const _) }
    }

    pub fn all_uses_are_fsat(&self) -> bool {
        unsafe { nir_def_all_uses_are_fsat(self as *const _) }
    }
}
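// Illustrative sketch, not part of the upstream file: any struct that embeds
// an exec_node can be walked with ExecListIter plus offset_of!, mirroring
// C's foreach_list_typed(). For example, collecting the predecessor blocks
// of a phi through the iter_srcs() wrapper defined further down:
#[allow(dead_code)]
fn phi_preds<'a>(phi: &'a nir_phi_instr) -> Vec<&'a nir_block> {
    phi.iter_srcs().map(|src| src.pred()).collect()
}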
pub trait AsDef {
    fn as_def<'a>(&'a self) -> &'a nir_def;

    fn bit_size(&self) -> u8 {
        self.as_def().bit_size
    }

    fn num_components(&self) -> u8 {
        self.as_def().num_components
    }

    fn as_load_const<'a>(&'a self) -> Option<&'a nir_load_const_instr> {
        self.as_def().parent_instr().as_load_const()
    }

    fn is_const(&self) -> bool {
        self.as_load_const().is_some()
    }

    fn comp_as_int(&self, comp: u8) -> Option<i64> {
        if let Some(load) = self.as_load_const() {
            assert!(comp < load.def.num_components);
            Some(unsafe {
                let comp = usize::from(comp);
                match self.bit_size() {
                    8 => load.values()[comp].i8_ as i64,
                    16 => load.values()[comp].i16_ as i64,
                    32 => load.values()[comp].i32_ as i64,
                    64 => load.values()[comp].i64_,
                    _ => panic!("Invalid bit size"),
                }
            })
        } else {
            None
        }
    }

    fn comp_as_uint(&self, comp: u8) -> Option<u64> {
        if let Some(load) = self.as_load_const() {
            assert!(comp < load.def.num_components);
            Some(unsafe {
                let comp = usize::from(comp);
                match self.bit_size() {
                    8 => load.values()[comp].u8_ as u64,
                    16 => load.values()[comp].u16_ as u64,
                    32 => load.values()[comp].u32_ as u64,
                    64 => load.values()[comp].u64_,
                    _ => panic!("Invalid bit size"),
                }
            })
        } else {
            None
        }
    }

    fn as_int(&self) -> Option<i64> {
        assert!(self.num_components() == 1);
        self.comp_as_int(0)
    }

    fn as_uint(&self) -> Option<u64> {
        assert!(self.num_components() == 1);
        self.comp_as_uint(0)
    }

    fn is_zero(&self) -> bool {
        self.num_components() == 1 && self.as_uint() == Some(0)
    }
}

impl AsDef for nir_def {
    fn as_def<'a>(&'a self) -> &'a nir_def {
        self
    }
}

impl AsDef for nir_src {
    fn as_def<'a>(&'a self) -> &'a nir_def {
        unsafe { NonNull::new(self.ssa).unwrap().as_ref() }
    }
}

impl nir_alu_instr {
    pub fn info(&self) -> &'static nir_op_info {
        let info_idx: usize = self.op.try_into().unwrap();
        unsafe { &nir_op_infos[info_idx] }
    }

    pub fn src_components(&self, src_idx: u8) -> u8 {
        assert!(src_idx < self.info().num_inputs);
        unsafe {
            nir_ssa_alu_instr_src_components(self as *const _, src_idx.into())
                .try_into()
                .unwrap()
        }
    }

    pub fn srcs_as_slice<'a>(&'a self) -> &'a [nir_alu_src] {
        unsafe {
            self.src
                .as_slice(self.info().num_inputs.try_into().unwrap())
        }
    }

    pub fn get_src(&self, idx: usize) -> &nir_alu_src {
        &self.srcs_as_slice()[idx]
    }
}

impl nir_op_info {
    pub fn name(&self) -> &'static str {
        unsafe { CStr::from_ptr(self.name).to_str().expect("Invalid UTF-8") }
    }
}

impl nir_alu_src {
    pub fn bit_size(&self) -> u8 {
        self.src.bit_size()
    }

    // These apply the source swizzle before looking up the component.
    pub fn comp_as_int(&self, comp: u8) -> Option<i64> {
        self.src.comp_as_int(self.swizzle[usize::from(comp)])
    }

    pub fn comp_as_uint(&self, comp: u8) -> Option<u64> {
        self.src.comp_as_uint(self.swizzle[usize::from(comp)])
    }
}

impl nir_tex_instr {
    pub fn srcs_as_slice<'a>(&'a self) -> &'a [nir_tex_src] {
        unsafe { std::slice::from_raw_parts(self.src, self.num_srcs as usize) }
    }

    pub fn get_src(&self, idx: usize) -> &nir_tex_src {
        &self.srcs_as_slice()[idx]
    }
}
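// Illustrative sketch, not part of the upstream file: the AsDef helpers make
// constant-folding checks terse. Assuming the caller has already matched the
// opcode (e.g. an integer add), two constant scalar sources fold like this:
#[allow(dead_code)]
fn fold_scalar_iadd(alu: &nir_alu_instr) -> Option<i64> {
    let a = alu.get_src(0).comp_as_int(0)?;
    let b = alu.get_src(1).comp_as_int(0)?;
    Some(a.wrapping_add(b))
}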
impl nir_intrinsic_instr {
    pub fn info(&self) -> &'static nir_intrinsic_info {
        let info_idx: usize = self.intrinsic.try_into().unwrap();
        unsafe { &nir_intrinsic_infos[info_idx] }
    }

    pub fn srcs_as_slice<'a>(&'a self) -> &'a [nir_src] {
        unsafe { self.src.as_slice(self.info().num_srcs.try_into().unwrap()) }
    }

    pub fn get_src(&self, idx: usize) -> &nir_src {
        &self.srcs_as_slice()[idx]
    }

    pub fn get_const_index(&self, name: u32) -> u32 {
        let name: usize = name.try_into().unwrap();
        // index_map entries are biased by 1 so that 0 can mean "this
        // intrinsic doesn't have that index".
        let idx = self.info().index_map[name];
        assert!(idx > 0);
        self.const_index[usize::from(idx - 1)] as u32
    }

    pub fn base(&self) -> i32 {
        self.get_const_index(NIR_INTRINSIC_BASE) as i32
    }

    pub fn range_base(&self) -> i32 {
        self.get_const_index(NIR_INTRINSIC_RANGE_BASE) as i32
    }

    pub fn range(&self) -> i32 {
        self.get_const_index(NIR_INTRINSIC_RANGE) as i32
    }

    pub fn write_mask(&self) -> u32 {
        self.get_const_index(NIR_INTRINSIC_WRITE_MASK)
    }

    pub fn stream_id(&self) -> u32 {
        self.get_const_index(NIR_INTRINSIC_STREAM_ID)
    }

    pub fn component(&self) -> u32 {
        self.get_const_index(NIR_INTRINSIC_COMPONENT)
    }

    pub fn interp_mode(&self) -> u32 {
        self.get_const_index(NIR_INTRINSIC_INTERP_MODE)
    }

    pub fn reduction_op(&self) -> nir_op {
        self.get_const_index(NIR_INTRINSIC_REDUCTION_OP) as nir_op
    }

    pub fn cluster_size(&self) -> u32 {
        self.get_const_index(NIR_INTRINSIC_CLUSTER_SIZE)
    }

    pub fn image_dim(&self) -> glsl_sampler_dim {
        self.get_const_index(NIR_INTRINSIC_IMAGE_DIM) as glsl_sampler_dim
    }

    pub fn image_array(&self) -> bool {
        self.get_const_index(NIR_INTRINSIC_IMAGE_ARRAY) != 0
    }

    pub fn access(&self) -> gl_access_qualifier {
        self.get_const_index(NIR_INTRINSIC_ACCESS) as gl_access_qualifier
    }

    // The largest power of two that divides every access address: bounded by
    // the lowest set bit of align_offset when that is non-zero, otherwise by
    // align_mul itself.
    pub fn align(&self) -> u32 {
        let mul = self.align_mul();
        let offset = self.align_offset();
        assert!(offset < mul);
        if offset > 0 {
            1 << offset.trailing_zeros()
        } else {
            mul
        }
    }

    pub fn align_mul(&self) -> u32 {
        self.get_const_index(NIR_INTRINSIC_ALIGN_MUL)
    }

    pub fn align_offset(&self) -> u32 {
        self.get_const_index(NIR_INTRINSIC_ALIGN_OFFSET)
    }

    pub fn execution_scope(&self) -> mesa_scope {
        self.get_const_index(NIR_INTRINSIC_EXECUTION_SCOPE)
    }

    pub fn memory_scope(&self) -> mesa_scope {
        self.get_const_index(NIR_INTRINSIC_MEMORY_SCOPE)
    }

    pub fn memory_semantics(&self) -> nir_memory_semantics {
        self.get_const_index(NIR_INTRINSIC_MEMORY_SEMANTICS)
    }
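// Illustrative sketch, not part of the upstream file: a debug summary built
// from the typed accessors above. get_const_index() asserts if the opcode
// does not define the requested index, so only generic fields are used here.
#[allow(dead_code)]
fn intrinsic_summary(intrin: &nir_intrinsic_instr) -> String {
    format!(
        "{} with {} source(s)",
        intrin.info().name(),
        intrin.srcs_as_slice().len()
    )
}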
    pub fn memory_modes(&self) -> nir_variable_mode {
        self.get_const_index(NIR_INTRINSIC_MEMORY_MODES)
    }

    pub fn flags(&self) -> u32 {
        self.get_const_index(NIR_INTRINSIC_FLAGS)
    }

    pub fn atomic_op(&self) -> nir_atomic_op {
        self.get_const_index(NIR_INTRINSIC_ATOMIC_OP) as nir_atomic_op
    }
}

impl nir_intrinsic_info {
    pub fn name(&self) -> &'static str {
        unsafe { CStr::from_ptr(self.name).to_str().expect("Invalid UTF-8") }
    }
}

impl nir_load_const_instr {
    pub fn values<'a>(&'a self) -> &'a [nir_const_value] {
        unsafe { self.value.as_slice(self.def.num_components as usize) }
    }
}

impl nir_phi_src {
    pub fn pred<'a>(&'a self) -> &'a nir_block {
        unsafe { NonNull::new(self.pred).unwrap().as_ref() }
    }
}

impl nir_phi_instr {
    pub fn iter_srcs(&self) -> ExecListIter<nir_phi_src> {
        ExecListIter::new(&self.srcs, offset_of!(nir_phi_src, node))
    }
}

impl nir_jump_instr {
    pub fn target<'a>(&'a self) -> Option<&'a nir_block> {
        NonNull::new(self.target).map(|b| unsafe { b.as_ref() })
    }

    pub fn else_target<'a>(&'a self) -> Option<&'a nir_block> {
        NonNull::new(self.else_target).map(|b| unsafe { b.as_ref() })
    }
}

impl nir_instr {
    pub fn as_alu<'a>(&'a self) -> Option<&'a nir_alu_instr> {
        if self.type_ == nir_instr_type_alu {
            let p = self as *const nir_instr;
            Some(unsafe { &*(p as *const nir_alu_instr) })
        } else {
            None
        }
    }

    pub fn as_jump<'a>(&'a self) -> Option<&'a nir_jump_instr> {
        if self.type_ == nir_instr_type_jump {
            let p = self as *const nir_instr;
            Some(unsafe { &*(p as *const nir_jump_instr) })
        } else {
            None
        }
    }

    pub fn as_tex<'a>(&'a self) -> Option<&'a nir_tex_instr> {
        if self.type_ == nir_instr_type_tex {
            let p = self as *const nir_instr;
            Some(unsafe { &*(p as *const nir_tex_instr) })
        } else {
            None
        }
    }

    pub fn as_intrinsic<'a>(&'a self) -> Option<&'a nir_intrinsic_instr> {
        if self.type_ == nir_instr_type_intrinsic {
            let p = self as *const nir_instr;
            Some(unsafe { &*(p as *const nir_intrinsic_instr) })
        } else {
            None
        }
    }

    pub fn as_load_const<'a>(&'a self) -> Option<&'a nir_load_const_instr> {
        if self.type_ == nir_instr_type_load_const {
            let p = self as *const nir_instr;
            Some(unsafe { &*(p as *const nir_load_const_instr) })
        } else {
            None
        }
    }

    pub fn as_undef<'a>(&'a self) -> Option<&'a nir_undef_instr> {
        if self.type_ == nir_instr_type_undef {
            let p = self as *const nir_instr;
            Some(unsafe { &*(p as *const nir_undef_instr) })
        } else {
            None
        }
    }

    pub fn as_phi<'a>(&'a self) -> Option<&'a nir_phi_instr> {
        if self.type_ == nir_instr_type_phi {
            let p = self as *const nir_instr;
            Some(unsafe { &*(p as *const nir_phi_instr) })
        } else {
            None
        }
    }

    pub fn def<'a>(&'a self) -> Option<&'a nir_def> {
        unsafe {
            let def = nir_instr_def(self as *const _ as *mut _);
            NonNull::new(def).map(|d| d.as_ref())
        }
    }
}
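// Illustrative sketch, not part of the upstream file: the as_*() methods are
// checked downcasts keyed on instr.type_, so instruction matching becomes a
// chain of Option tests instead of a C switch over nir_instr_type.
#[allow(dead_code)]
fn is_known_zero(instr: &nir_instr) -> bool {
    instr
        .as_load_const()
        .map(|lc| lc.def.is_zero())
        .unwrap_or(false)
}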
impl nir_block {
    pub fn iter_instr_list(&self) -> ExecListIter<nir_instr> {
        ExecListIter::new(&self.instr_list, offset_of!(nir_instr, node))
    }

    pub fn successors<'a>(&'a self) -> [Option<&'a nir_block>; 2] {
        [
            NonNull::new(self.successors[0]).map(|b| unsafe { b.as_ref() }),
            NonNull::new(self.successors[1]).map(|b| unsafe { b.as_ref() }),
        ]
    }

    pub fn following_if<'a>(&'a self) -> Option<&'a nir_if> {
        let self_ptr = self as *const _ as *mut _;
        unsafe { nir_block_get_following_if(self_ptr).as_ref() }
    }

    pub fn following_loop<'a>(&'a self) -> Option<&'a nir_loop> {
        let self_ptr = self as *const _ as *mut _;
        unsafe { nir_block_get_following_loop(self_ptr).as_ref() }
    }

    pub fn parent(&self) -> &nir_cf_node {
        self.cf_node.parent().unwrap()
    }
}

impl nir_if {
    pub fn first_then_block(&self) -> &nir_block {
        self.iter_then_list().next().unwrap().as_block().unwrap()
    }

    pub fn first_else_block(&self) -> &nir_block {
        self.iter_else_list().next().unwrap().as_block().unwrap()
    }

    pub fn iter_then_list(&self) -> ExecListIter<nir_cf_node> {
        ExecListIter::new(&self.then_list, offset_of!(nir_cf_node, node))
    }

    pub fn iter_else_list(&self) -> ExecListIter<nir_cf_node> {
        ExecListIter::new(&self.else_list, offset_of!(nir_cf_node, node))
    }

    pub fn following_block(&self) -> &nir_block {
        self.cf_node.next().unwrap().as_block().unwrap()
    }
}

impl nir_loop {
    pub fn iter_body(&self) -> ExecListIter<nir_cf_node> {
        ExecListIter::new(&self.body, offset_of!(nir_cf_node, node))
    }

    pub fn first_block(&self) -> &nir_block {
        self.iter_body().next().unwrap().as_block().unwrap()
    }

    pub fn following_block(&self) -> &nir_block {
        self.cf_node.next().unwrap().as_block().unwrap()
    }
}
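// Illustrative sketch, not part of the upstream file: because NIR control
// flow is structured, a region's entry and merge blocks come straight from
// the helpers above, with no raw successor-pointer chasing.
#[allow(dead_code)]
fn loop_entry_and_merge(l: &nir_loop) -> (&nir_block, &nir_block) {
    (l.first_block(), l.following_block())
}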
impl nir_cf_node {
    pub fn as_block<'a>(&'a self) -> Option<&'a nir_block> {
        if self.type_ == nir_cf_node_block {
            Some(unsafe { &*(self as *const nir_cf_node as *const nir_block) })
        } else {
            None
        }
    }

    pub fn as_if<'a>(&'a self) -> Option<&'a nir_if> {
        if self.type_ == nir_cf_node_if {
            Some(unsafe { &*(self as *const nir_cf_node as *const nir_if) })
        } else {
            None
        }
    }

    pub fn as_loop<'a>(&'a self) -> Option<&'a nir_loop> {
        if self.type_ == nir_cf_node_loop {
            Some(unsafe { &*(self as *const nir_cf_node as *const nir_loop) })
        } else {
            None
        }
    }

    pub fn next(&self) -> Option<&nir_cf_node> {
        let mut iter: ExecListIter<nir_cf_node> =
            ExecListIter::at(&self.node, offset_of!(nir_cf_node, node), false);
        iter.next()
    }

    pub fn prev(&self) -> Option<&nir_cf_node> {
        let mut iter: ExecListIter<nir_cf_node> =
            ExecListIter::at(&self.node, offset_of!(nir_cf_node, node), true);
        iter.next()
    }

    pub fn parent<'a>(&'a self) -> Option<&'a nir_cf_node> {
        NonNull::new(self.parent).map(|b| unsafe { b.as_ref() })
    }
}

impl nir_function_impl {
    pub fn iter_body(&self) -> ExecListIter<nir_cf_node> {
        ExecListIter::new(&self.body, offset_of!(nir_cf_node, node))
    }

    pub fn end_block<'a>(&'a self) -> &'a nir_block {
        unsafe { NonNull::new(self.end_block).unwrap().as_ref() }
    }

    pub fn function<'a>(&'a self) -> &'a nir_function {
        unsafe { self.function.as_ref() }.unwrap()
    }
}

impl nir_function {
    pub fn get_impl(&self) -> Option<&nir_function_impl> {
        unsafe { self.impl_.as_ref() }
    }
}

impl nir_shader {
    pub fn iter_functions(&self) -> ExecListIter<nir_function> {
        ExecListIter::new(&self.functions, offset_of!(nir_function, node))
    }

    pub fn iter_variables(&self) -> ExecListIter<nir_variable> {
        ExecListIter::new(&self.variables, offset_of!(nir_variable, node))
    }
}
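// Illustrative sketch, not part of the upstream file: a full traversal
// recurses through blocks, ifs, and loops. count_alu() and shader_alu_count()
// are hypothetical helpers showing how the iterators above compose into a
// whole-shader walk.
#[allow(dead_code)]
fn count_alu(cf: &nir_cf_node) -> usize {
    if let Some(b) = cf.as_block() {
        b.iter_instr_list().filter(|i| i.as_alu().is_some()).count()
    } else if let Some(i) = cf.as_if() {
        i.iter_then_list().map(count_alu).sum::<usize>()
            + i.iter_else_list().map(count_alu).sum::<usize>()
    } else if let Some(l) = cf.as_loop() {
        l.iter_body().map(count_alu).sum()
    } else {
        0
    }
}

#[allow(dead_code)]
fn shader_alu_count(shader: &nir_shader) -> usize {
    shader
        .iter_functions()
        .filter_map(|f| f.get_impl())
        .map(|fi| fi.iter_body().map(count_alu).sum::<usize>())
        .sum()
}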