Searched full:ir (Results 1 – 25 of 11789) sorted by relevance
27  * Attempts to verify that various invariants of the IR tree are true.
31  * in the ir tree. ir_variable does appear multiple times: Once as a
36  #include "ir.h"
68  virtual ir_visitor_status visit(ir_dereference_variable *ir);
70  virtual ir_visitor_status visit_enter(ir_discard *ir);
71  virtual ir_visitor_status visit_enter(ir_if *ir);
73  virtual ir_visitor_status visit_enter(ir_function *ir);
74  virtual ir_visitor_status visit_leave(ir_function *ir);
75  virtual ir_visitor_status visit_enter(ir_function_signature *ir);
76  virtual ir_visitor_status visit_enter(ir_return *ir);
[all …]
86   foreach_in_list(ir_instruction, ir, instructions) {  in _mesa_print_ir()
87   ir->fprint(f);  in _mesa_print_ir()
88   if (ir->ir_type != ir_type_function)  in _mesa_print_ir()
97   const ir_instruction *ir = (const ir_instruction *)instruction;  in fprint_ir() local
98   ir->fprint(f);  in fprint_ir()
164  void ir_print_visitor::visit(ir_variable *ir)  in visit() argument
169  if (ir->data.binding)  in visit()
170  snprintf(binding, sizeof(binding), "binding=%i ", ir->data.binding);  in visit()
173  if (ir->data.location != -1)  in visit()
174  snprintf(loc, sizeof(loc), "location=%i ", ir->data.location);  in visit()
[all …]
32   #include "ir.h"
46   * pass to lower GLSL IR to NIR
85   void create_function(ir_function_signature *ir);
89   void truncate_after_instruction(exec_node *ir);
90   nir_def *evaluate_rvalue(ir_rvalue *ir);
106  nir_deref_instr *evaluate_deref(ir_instruction *ir);
108  nir_constant *constant_copy(ir_constant *ir, void *mem_ctx);
113  /* whether the IR we're operating on is per-function or global */
155  struct exec_list **ir, shader_info *si, gl_shader_stage stage,  in glsl_to_nir() argument
164  v2.run(*ir);  in glsl_to_nir()
[all …]
15 "cmd/compile/internal/ir"29 func walkExpr(n ir.Node, init *ir.Nodes) ir.Node {34 if n, ok := n.(ir.InitNode); ok && init == n.PtrInit() {43 init.Append(ir.TakeInit(n)...)46 lno := ir.SetPos(n)49 ir.Dump("before walk expr", n)66 if n, ok := n.(*ir.Name); ok && n.Heapaddr != nil {69 if ir.IsConst(n, constant.String) {76 ir.Dump("after walk expr", n)83 func walkExpr1(n ir.Node, init *ir.Nodes) ir.Node {[all …]
12 "cmd/compile/internal/ir"20 func walkAssign(init *ir.Nodes, n ir.Node) ir.Node {21 init.Append(ir.TakeInit(n)...)23 var left, right ir.Node25 case ir.OAS:26 n := n.(*ir.AssignStmt)28 case ir.OASOP:29 n := n.(*ir.AssignOpStmt)35 var mapAppend *ir.CallExpr36 if left.Op() == ir.OINDEXMAP && right.Op() == ir.OAPPEND {[all …]
12 "cmd/compile/internal/ir"42 out []ir.Node // list of generated statements43 temp []*ir.Name // stack of temporary variables44 free map[string][]*ir.Name // free list of unused temporaries, by type.LinkString().45 edit func(ir.Node) ir.Node // cached closure of o.exprNoLHS50 func order(fn *ir.Func) {53 ir.DumpList(s, fn.Body)55 ir.SetPos(fn) // Set reasonable position for instrumenting code. See issue 53688.56 orderBlock(&fn.Body, map[string][]*ir.Name{})60 func (o *orderState) append(stmt ir.Node) {[all …]
9 "cmd/compile/internal/ir"20 func walkCompLit(n ir.Node, init *ir.Nodes) ir.Node {22 n := n.(*ir.CompLitExpr) // not OPTRLIT29 var_ := typecheck.TempAt(base.Pos, ir.CurFunc, n.Type())59 func readonlystaticname(t *types.Type) *ir.Name {67 func isSimpleName(nn ir.Node) bool {68 if nn.Op() != ir.ONAME || ir.IsBlank(nn) {71 n := nn.(*ir.Name)85 func getdyn(n ir.Node, top bool) initGenType {88 if ir.IsConstNode(n) {[all …]
16 "cmd/compile/internal/ir"23 func fakePC(n ir.Node) ir.Node {25 // in the calculation of the fakePC for the IR node.36 return ir.NewInt(base.Pos, int64(hash.Sum32()))42 func walkCompare(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {43 …if n.X.Type().IsInterface() && n.Y.Type().IsInterface() && n.X.Op() != ir.ONIL && n.Y.Op() != ir.O…72 andor := ir.OOROR73 if eq == ir.OEQ {74 andor = ir.OANDAND84 var eqtype ir.Node[all …]
16 "cmd/compile/internal/ir"44 func walkAppend(n *ir.CallExpr, init *ir.Nodes, dst ir.Node) ir.Node {45 if !ir.SameSafeExpr(dst, n.Args[0]) {80 var l []ir.Node83 s := typecheck.TempAt(base.Pos, ir.CurFunc, nsrc.Type())84 l = append(l, ir.NewAssignStmt(base.Pos, s, nsrc))87 num := ir.NewInt(base.Pos, int64(argc))90 newLen := typecheck.TempAt(base.Pos, ir.CurFunc, types.Types[types.TINT])91 …l = append(l, ir.NewAssignStmt(base.Pos, newLen, ir.NewBinaryExpr(base.Pos, ir.OADD, ir.NewUnaryEx…94 nif := ir.NewIfStmt(base.Pos, nil, nil, nil)[all …]
11 "cmd/compile/internal/ir"40 func walkRange(nrange *ir.RangeStmt) ir.Node {46 nfor := ir.NewForStmt(nrange.Pos(), nil, nil, nil, nil, nrange.DistinctVars)59 lno := ir.SetPos(a)63 if ir.IsBlank(v2) {67 if ir.IsBlank(v1) && v2 == nil {75 var body []ir.Node76 var init []ir.Node82 hv1 := typecheck.TempAt(base.Pos, ir.CurFunc, t)83 hn := typecheck.TempAt(base.Pos, ir.CurFunc, t)[all …]
12 "cmd/compile/internal/ir"24 func Walk(fn *ir.Func) {25 ir.CurFunc = fn33 s := fmt.Sprintf("\nbefore walk %v", ir.CurFunc.Sym())34 ir.DumpList(s, ir.CurFunc.Body)37 walkStmtList(ir.CurFunc.Body)39 s := fmt.Sprintf("after walk %v", ir.CurFunc.Sym())40 ir.DumpList(s, ir.CurFunc.Body)50 func walkRecv(n *ir.UnaryExpr) ir.Node {54 init := ir.TakeInit(n)[all …]
12 "cmd/compile/internal/ir"21 func walkConv(n *ir.ConvExpr, init *ir.Nodes) ir.Node {23 if n.Op() == ir.OCONVNOP && n.Type() == n.X.Type() {26 if n.Op() == ir.OCONVNOP && ir.ShouldCheckPtr(ir.CurFunc, 1) {40 func walkConvInterface(n *ir.ConvExpr, init *ir.Nodes) ir.Node {46 if !fromType.IsInterface() && !ir.IsBlank(ir.CurFunc.Nname) {49 // Unified IR uses OCONVIFACE for converting all derived types54 reflectdata.MarkTypeUsedInInterface(fromType, ir.CurFunc.LSym)60 l := ir.NewBinaryExpr(base.Pos, ir.OMAKEFACE, typeWord, dataWord(n, init))70 c := typecheck.TempAt(base.Pos, ir.CurFunc, fromType)[all …]
9 "cmd/compile/internal/ir"15 func walkStmt(n ir.Node) ir.Node {20 ir.SetPos(n)26 if n.Op() == ir.ONAME {27 n := n.(*ir.Name)32 ir.Dump("nottop", n)35 case ir.OAS,36 ir.OASOP,37 ir.OAS2,38 ir.OAS2DOTTYPE,[all …]
15 "cmd/compile/internal/ir"27 func walkSwitch(sw *ir.SwitchStmt) {34 if sw.Tag != nil && sw.Tag.Op() == ir.OTYPESW {43 func walkSwitchExpr(sw *ir.SwitchStmt) {44 lno := ir.SetPos(sw)51 cond = ir.NewBool(base.Pos, true)63 if cond.Op() == ir.OBYTES2STR && allCaseExprsAreSideEffectFree(sw) {64 cond := cond.(*ir.ConvExpr)65 cond.SetOp(ir.OBYTES2STRTMP)69 if cond.Op() != ir.OLITERAL && cond.Op() != ir.ONIL {[all …]
15 "cmd/compile/internal/ir"27 Expr ir.Node // bytes of run-time computed expressions41 Out []ir.Node43 Plans map[ir.Node]*Plan44 Temps map[ir.Node]*ir.Name52 func (s *Schedule) append(n ir.Node) {57 func (s *Schedule) StaticInit(n ir.Node) {60 ir.Dump("StaticInit failed", n)69 var varToMapInit map[*ir.Name]*ir.Func74 var MapInitToVar map[*ir.Func]*ir.Name[all …]
14 "cmd/compile/internal/ir"19 func AssignExpr(n ir.Node) ir.Node { return typecheck(n, ctxExpr|ctxAssign) }20 func Expr(n ir.Node) ir.Node { return typecheck(n, ctxExpr) }21 func Stmt(n ir.Node) ir.Node { return typecheck(n, ctxStmt) }23 func Exprs(exprs []ir.Node) { typecheckslice(exprs, ctxExpr) }24 func Stmts(stmts []ir.Node) { typecheckslice(stmts, ctxStmt) }26 func Call(pos src.XPos, callee ir.Node, args []ir.Node, dots bool) ir.Node {27 call := ir.NewCallExpr(pos, ir.OCALL, callee, args)32 func Callee(n ir.Node) ir.Node {38 func tracePrint(title string, n ir.Node) func(np *ir.Node) {[all …]
9 "cmd/compile/internal/ir"22 func typecheckrangeExpr(n *ir.RangeStmt) {28 func tcAssign(n *ir.AssignStmt) {38 lhs, rhs := []ir.Node{n.X}, []ir.Node{n.Y}43 if !ir.IsBlank(n.X) {48 func tcAssignList(n *ir.AssignListStmt) {56 func assign(stmt ir.Node, lhs, rhs []ir.Node) {66 if n := lhs[i]; typ != nil && ir.DeclaredBy(n, stmt) && n.Type() == nil {96 stmt := stmt.(*ir.AssignListStmt)100 case ir.OINDEXMAP:[all …]
9 "cmd/compile/internal/ir"15 func (e *escape) expr(k hole, n ir.Node) {23 func (e *escape) exprSkipInit(k hole, n ir.Node) {28 lno := ir.SetPos(n)41 …case ir.OLITERAL, ir.ONIL, ir.OGETG, ir.OGETCALLERPC, ir.OGETCALLERSP, ir.OTYPE, ir.OMETHEXPR, ir.…44 case ir.ONAME:45 n := n.(*ir.Name)46 if n.Class == ir.PFUNC || n.Class == ir.PEXTERN {51 case ir.OPLUS, ir.ONEG, ir.OBITNOT, ir.ONOT:52 n := n.(*ir.UnaryExpr)[all …]
7   from onnxscript import ir
17  _TORCH_DTYPE_TO_ONNX_COMPATIBLE: dict[torch.dtype, ir.DataType] = {
18  torch.bfloat16: ir.DataType.BFLOAT16,
19  torch.bool: ir.DataType.BOOL,
20  torch.complex128: ir.DataType.DOUBLE,
21  torch.complex64: ir.DataType.FLOAT,
22  torch.float16: ir.DataType.FLOAT16,
23  torch.float32: ir.DataType.FLOAT,
24  torch.float64: ir.DataType.DOUBLE,
25  torch.float8_e4m3fn: ir.DataType.FLOAT8E4M3FN,
[all …]
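Side note on the result above: the _TORCH_DTYPE_TO_ONNX_COMPATIBLE table maps torch.dtype values onto onnxscript IR data types, including lossy entries (complex64/complex128 and float64 fall back to FLOAT/DOUBLE). A minimal sketch of how such a table could be consulted, assuming only the dictionary entries shown in the hit; the helper name to_onnx_dtype is hypothetical:

    # Minimal sketch (assumes torch and onnxscript are installed; only the dict
    # entries come from the search hit above, the helper itself is hypothetical).
    import torch
    from onnxscript import ir

    _TORCH_DTYPE_TO_ONNX_COMPATIBLE: dict[torch.dtype, ir.DataType] = {
        torch.bfloat16: ir.DataType.BFLOAT16,
        torch.bool: ir.DataType.BOOL,
        torch.float16: ir.DataType.FLOAT16,
        torch.float32: ir.DataType.FLOAT,
        torch.float64: ir.DataType.DOUBLE,
    }

    def to_onnx_dtype(dtype: torch.dtype) -> ir.DataType:
        # Look up the ONNX-compatible data type; fail loudly for unmapped dtypes.
        try:
            return _TORCH_DTYPE_TO_ONNX_COMPATIBLE[dtype]
        except KeyError:
            raise ValueError(f"no ONNX-compatible data type for {dtype}") from None

    print(to_onnx_dtype(torch.float32).name)  # FLOAT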
28  ir, unused_debug_info, errors = glue.parse_emboss_file(
33  return ir
36  ir = self._make_ir("struct Foo:\n"
39  self.assertEqual([], type_check.annotate_types(ir))
40  expression = ir.module[0].type[0].structure.field[1].location.size
44  ir = self._make_ir("struct Foo:\n"
47  self.assertEqual([], error.filter_errors(type_check.annotate_types(ir)),
48  ir_data_utils.IrDataSerializer(ir).to_json(indent=2))
49  expression = ir.module[0].type[0].structure.field[1].location.size
53  ir = self._make_ir("struct Foo:\n"
[all …]
1 …IR/libLLVMCore/android_arm64_armv8-2a_cortex-a55_static/obj/external/llvm/lib/IR/AsmWriter.o out/s…
37 "cmd/compile/internal/ir"65 hasHotCall = make(map[*ir.Func]struct{})84 func IsPgoHotFunc(fn *ir.Func, profile *pgoir.Profile) bool {88 if n, ok := profile.WeightedCG.IRNodes[ir.LinkFuncName(fn)]; ok {95 func HasPgoHotInline(fn *ir.Func) bool {100 // PGOInlinePrologue records the hot callsites from ir-graph.159 func CanInlineFuncs(funcs []*ir.Func, profile *pgoir.Profile) {168 ir.VisitFuncsBottomUp(funcs, func(funcs []*ir.Func, recursive bool) {179 fmt.Printf("%v: cannot inline %v: recursive\n", ir.Line(fn), fn.Nname)196 liveFuncs := make(map[*ir.Func]bool)[all …]