// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package pointer

// This file defines the constraint generation phase.

// TODO(adonovan): move the constraint definitions and the store() etc
// functions which add them (and are also used by the solver) into a
// new file, constraints.go.

import (
	"fmt"
	"go/token"
	"go/types"
	"strings"

	"golang.org/x/tools/go/callgraph"
	"golang.org/x/tools/go/ssa"
	"golang.org/x/tools/internal/typeparams"
)

var (
	tEface     = types.NewInterfaceType(nil, nil).Complete()
	tInvalid   = types.Typ[types.Invalid]
	tUnsafePtr = types.Typ[types.UnsafePointer]
)

// ---------- Node creation ----------

// nextNode returns the index of the next unused node.
func (a *analysis) nextNode() nodeid {
	return nodeid(len(a.nodes))
}

// addNodes creates nodes for all scalar elements in type typ, and
// returns the id of the first one, or zero if the type was
// analytically uninteresting.
//
// comment explains the origin of the nodes, as a debugging aid.
func (a *analysis) addNodes(typ types.Type, comment string) nodeid {
	id := a.nextNode()
	for _, fi := range a.flatten(typ) {
		a.addOneNode(fi.typ, comment, fi)
	}
	if id == a.nextNode() {
		return 0 // type contained no pointers
	}
	return id
}
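
// For illustration only (a sketch, not part of the analysis), given
//
//	type T struct {
//		x *int
//		y [3]*string
//	}
//
// addNodes(T, "t") creates one node per flattened element of T (an
// identity node for the struct itself, then nodes for the subelements
// ".x" and ".y[*]") and returns the id of the first.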

// addOneNode creates a single node with type typ, and returns its id.
//
// typ should generally be scalar (except for tagged.T nodes
// and struct/array identity nodes). Use addNodes for non-scalar types.
//
// comment explains the origin of the nodes, as a debugging aid.
// subelement indicates the subelement, e.g. ".a.b[*].c".
func (a *analysis) addOneNode(typ types.Type, comment string, subelement *fieldInfo) nodeid {
	id := a.nextNode()
	a.nodes = append(a.nodes, &node{typ: typ, subelement: subelement, solve: new(solverState)})
	if a.log != nil {
		fmt.Fprintf(a.log, "\tcreate n%d %s for %s%s\n",
			id, typ, comment, subelement.path())
	}
	return id
}

// setValueNode associates node id with the value v.
// cgn identifies the context iff v is a local variable.
func (a *analysis) setValueNode(v ssa.Value, id nodeid, cgn *cgnode) {
	if cgn != nil {
		a.localval[v] = id
	} else {
		a.globalval[v] = id
	}
	if a.log != nil {
		fmt.Fprintf(a.log, "\tval[%s] = n%d (%T)\n", v.Name(), id, v)
	}

	// Due to context-sensitivity, we may encounter the same Value
	// in many contexts. We merge them to a canonical node, since
	// that's what all clients want.

	// Record the (v, id) relation if the client has queried pts(v).
	if _, ok := a.config.Queries[v]; ok {
		t := v.Type()
		ptr, ok := a.result.Queries[v]
		if !ok {
			// First time? Create the canonical query node.
			ptr = Pointer{a, a.addNodes(t, "query")}
			a.result.Queries[v] = ptr
		}
		a.copy(ptr.n, id, a.sizeof(t))
	}

	// Record the (*v, id) relation if the client has queried pts(*v).
	if _, ok := a.config.IndirectQueries[v]; ok {
		t := v.Type()
		ptr, ok := a.result.IndirectQueries[v]
		if !ok {
			// First time? Create the canonical indirect query node.
			ptr = Pointer{a, a.addNodes(v.Type(), "query.indirect")}
			a.result.IndirectQueries[v] = ptr
		}
		a.genLoad(cgn, ptr.n, v, 0, a.sizeof(t))
	}

	for _, query := range a.config.extendedQueries[v] {
		t, nid := a.evalExtendedQuery(v.Type().Underlying(), id, query.ops)

		if query.ptr.a == nil {
			query.ptr.a = a
			query.ptr.n = a.addNodes(t, "query.extended")
		}
		a.copy(query.ptr.n, nid, a.sizeof(t))
	}
}

// endObject marks the end of a sequence of calls to addNodes denoting
// a single object allocation.
//
// obj is the start node of the object, from a prior call to nextNode.
// Its size, flags and optional data will be updated.
func (a *analysis) endObject(obj nodeid, cgn *cgnode, data interface{}) *object {
	// Ensure object is non-empty by padding;
	// the pad will be the object node.
	size := uint32(a.nextNode() - obj)
	if size == 0 {
		a.addOneNode(tInvalid, "padding", nil)
	}
	objNode := a.nodes[obj]
	o := &object{
		size: size, // excludes padding
		cgn:  cgn,
		data: data,
	}
	objNode.obj = o

	return o
}
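
// Typical usage (a sketch; cf. objectNode below): the addNodes calls
// for a single object are bracketed by nextNode and endObject:
//
//	obj := a.nextNode()
//	a.addNodes(T, "alloc")
//	a.endObject(obj, cgn, v)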

// makeFunctionObject creates a new function object (contour) for fn
// and returns the id of its first node. It also enqueues fn for
// subsequent constraint generation.
//
// For a context-sensitive contour, callersite identifies the sole
// callsite; for shared contours, callersite is nil.
func (a *analysis) makeFunctionObject(fn *ssa.Function, callersite *callsite) nodeid {
	if a.log != nil {
		fmt.Fprintf(a.log, "\t---- makeFunctionObject %s\n", fn)
	}

	// obj is the function object (identity, params, results).
	obj := a.nextNode()
	cgn := a.makeCGNode(fn, obj, callersite)
	sig := fn.Signature
	a.addOneNode(sig, "func.cgnode", nil) // (scalar with Signature type)
	if recv := sig.Recv(); recv != nil {
		a.addNodes(recv.Type(), "func.recv")
	}
	a.addNodes(sig.Params(), "func.params")
	a.addNodes(sig.Results(), "func.results")
	a.endObject(obj, cgn, fn).flags |= otFunction

	if a.log != nil {
		fmt.Fprintf(a.log, "\t----\n")
	}

	// Queue it up for constraint processing.
	a.genq = append(a.genq, cgn)

	return obj
}
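
// The object block created above is laid out as follows (funcParams
// and funcResults below rely on this layout):
//
//	obj+0	identity node (a scalar with Signature type)
//	obj+1	receiver nodes, if sig.Recv() != nil
//	...	parameter nodes (the P-block)
//	...	result nodes (the R-block)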

// makeTagged creates a tagged object of type typ.
func (a *analysis) makeTagged(typ types.Type, cgn *cgnode, data interface{}) nodeid {
	obj := a.addOneNode(typ, "tagged.T", nil) // NB: type may be non-scalar!
	a.addNodes(typ, "tagged.v")
	a.endObject(obj, cgn, data).flags |= otTagged
	return obj
}
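
// A tagged object at obj therefore has the layout (unpacked by
// taggedValue, below):
//
//	obj+0	tag node, whose type records the dynamic type typ
//	obj+1	payload: the flattened nodes of typ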

// makeRtype returns the canonical tagged object of type *rtype whose
// payload points to the sole rtype object for T.
//
// TODO(adonovan): move to reflect.go; it's part of the solver really.
func (a *analysis) makeRtype(T types.Type) nodeid {
	if v := a.rtypes.At(T); v != nil {
		return v.(nodeid)
	}

	// Create the object for the reflect.rtype itself, which is
	// ordinarily a large struct but here a single node will do.
	obj := a.nextNode()
	a.addOneNode(T, "reflect.rtype", nil)
	a.endObject(obj, nil, T)

	id := a.makeTagged(a.reflectRtypePtr, nil, T)
	a.nodes[id+1].typ = T // trick (each *rtype tagged object is a singleton)
	a.addressOf(a.reflectRtypePtr, id+1, obj)

	a.rtypes.Set(T, id)
	return id
}

// rtypeTaggedValue returns the type of the *reflect.rtype-tagged object obj.
func (a *analysis) rtypeTaggedValue(obj nodeid) types.Type {
	tDyn, t, _ := a.taggedValue(obj)
	if tDyn != a.reflectRtypePtr {
		panic(fmt.Sprintf("not a *reflect.rtype-tagged object: obj=n%d tag=%v payload=n%d", obj, tDyn, t))
	}
	return a.nodes[t].typ
}

// valueNode returns the id of the value node for v, creating it (and
// the association) as needed. It may return zero for uninteresting
// values containing no pointers.
func (a *analysis) valueNode(v ssa.Value) nodeid {
	// Value nodes for locals are created en masse by genFunc.
	if id, ok := a.localval[v]; ok {
		return id
	}

	// Value nodes for globals are created on demand.
	id, ok := a.globalval[v]
	if !ok {
		var comment string
		if a.log != nil {
			comment = v.String()
		}
		id = a.addNodes(v.Type(), comment)
		if obj := a.objectNode(nil, v); obj != 0 {
			a.addressOf(v.Type(), id, obj)
		}
		a.setValueNode(v, id, nil)
	}
	return id
}

// valueOffsetNode ascertains the node for tuple/struct value v,
// then returns the node for its subfield #index.
func (a *analysis) valueOffsetNode(v ssa.Value, index int) nodeid {
	id := a.valueNode(v)
	if id == 0 {
		panic(fmt.Sprintf("cannot offset within n0: %s = %s", v.Name(), v))
	}
	return id + nodeid(a.offsetOf(v.Type(), index))
}

// isTaggedObject reports whether object obj is a tagged object.
func (a *analysis) isTaggedObject(obj nodeid) bool {
	return a.nodes[obj].obj.flags&otTagged != 0
}

// taggedValue returns the dynamic type tag, the (first node of the)
// payload, and the indirect flag of the tagged object starting at id.
// Panic ensues if !isTaggedObject(id).
func (a *analysis) taggedValue(obj nodeid) (tDyn types.Type, v nodeid, indirect bool) {
	n := a.nodes[obj]
	flags := n.obj.flags
	if flags&otTagged == 0 {
		panic(fmt.Sprintf("not a tagged object: n%d", obj))
	}
	return n.typ, obj + 1, flags&otIndirect != 0
}

// funcParams returns the first node of the params (P) block of the
// function whose object node (obj.flags&otFunction) is id.
func (a *analysis) funcParams(id nodeid) nodeid {
	n := a.nodes[id]
	if n.obj == nil || n.obj.flags&otFunction == 0 {
		panic(fmt.Sprintf("funcParams(n%d): not a function object block", id))
	}
	return id + 1
}

// funcResults returns the first node of the results (R) block of the
// function whose object node (obj.flags&otFunction) is id.
func (a *analysis) funcResults(id nodeid) nodeid {
	n := a.nodes[id]
	if n.obj == nil || n.obj.flags&otFunction == 0 {
		panic(fmt.Sprintf("funcResults(n%d): not a function object block", id))
	}
	sig := n.typ.(*types.Signature)
	id += 1 + nodeid(a.sizeof(sig.Params()))
	if sig.Recv() != nil {
		id += nodeid(a.sizeof(sig.Recv().Type()))
	}
	return id
}
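
// For example (an illustrative sketch), for a function object obj of
// type
//
//	func(x *int) (y *int)
//
// the block is [identity, x, y], so funcParams(obj) == obj+1 and
// funcResults(obj) == obj+2, a pointer occupying a single node.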

// ---------- Constraint creation ----------

// copy creates a constraint of the form dst = src.
// sizeof is the width (in logical fields) of the copied type.
func (a *analysis) copy(dst, src nodeid, sizeof uint32) {
	if src == dst || sizeof == 0 {
		return // trivial
	}
	if src == 0 || dst == 0 {
		panic(fmt.Sprintf("ill-typed copy dst=n%d src=n%d", dst, src))
	}
	for i := uint32(0); i < sizeof; i++ {
		a.addConstraint(&copyConstraint{dst, src})
		src++
		dst++
	}
}
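
// For example (an illustrative sketch), with T = struct{ x, y *int },
// whose sizeof is 3 (the identity node plus two pointer fields), an
// assignment t = s of type T yields a.copy(t, s, 3), i.e. the three
// constraints
//
//	t = s
//	t+1 = s+1
//	t+2 = s+2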

// addressOf creates a constraint of the form id = &obj.
// T is the type of the address.
func (a *analysis) addressOf(T types.Type, id, obj nodeid) {
	if id == 0 {
		panic("addressOf: zero id")
	}
	if obj == 0 {
		panic("addressOf: zero obj")
	}
	if a.shouldTrack(T) {
		a.addConstraint(&addrConstraint{id, obj})
	}
}

// load creates a load constraint of the form dst = src[offset].
// offset is the pointer offset in logical fields.
// sizeof is the width (in logical fields) of the loaded type.
func (a *analysis) load(dst, src nodeid, offset, sizeof uint32) {
	if dst == 0 {
		return // load of non-pointerlike value
	}
	if src == 0 && dst == 0 {
		return // non-pointerlike operation
	}
	if src == 0 || dst == 0 {
		panic(fmt.Sprintf("ill-typed load dst=n%d src=n%d", dst, src))
	}
	for i := uint32(0); i < sizeof; i++ {
		a.addConstraint(&loadConstraint{offset, dst, src})
		offset++
		dst++
	}
}

// store creates a store constraint of the form dst[offset] = src.
// offset is the pointer offset in logical fields.
// sizeof is the width (in logical fields) of the stored type.
func (a *analysis) store(dst, src nodeid, offset uint32, sizeof uint32) {
	if src == 0 {
		return // store of non-pointerlike value
	}
	if src == 0 && dst == 0 {
		return // non-pointerlike operation
	}
	if src == 0 || dst == 0 {
		panic(fmt.Sprintf("ill-typed store dst=n%d src=n%d", dst, src))
	}
	for i := uint32(0); i < sizeof; i++ {
		a.addConstraint(&storeConstraint{offset, dst, src})
		offset++
		src++
	}
}
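
// For example (an illustrative sketch), a map object is laid out as
// (key, value), so the store-based encoding of m[k] = v (see the
// *ssa.MapUpdate case in genInstr) is
//
//	store(m, k, 0, sizeof(K))	// keys live at offset 0
//	store(m, v, sizeof(K), sizeof(V))	// values follow the keys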

// offsetAddr creates an offsetAddr constraint of the form dst = &src.#offset.
// offset is the field offset in logical fields.
// T is the type of the address.
func (a *analysis) offsetAddr(T types.Type, dst, src nodeid, offset uint32) {
	if !a.shouldTrack(T) {
		return
	}
	if offset == 0 {
		// Simplify  dst = &src->f0
		//       to  dst = src
		// (NB: this optimisation is defeated by the identity
		// field prepended to struct and array objects.)
		a.copy(dst, src, 1)
	} else {
		a.addConstraint(&offsetAddrConstraint{offset, dst, src})
	}
}

// typeAssert creates a typeFilter or untag constraint of the form dst = src.(T):
// typeFilter for an interface, untag for a concrete type.
// The exact flag is specified as for untagConstraint.
func (a *analysis) typeAssert(T types.Type, dst, src nodeid, exact bool) {
	if isInterface(T) {
		a.addConstraint(&typeFilterConstraint{T, dst, src})
	} else {
		a.addConstraint(&untagConstraint{T, dst, src, exact})
	}
}

// addConstraint adds c to the constraint set.
func (a *analysis) addConstraint(c constraint) {
	a.constraints = append(a.constraints, c)
	if a.log != nil {
		fmt.Fprintf(a.log, "\t%s\n", c)
	}
}

// copyElems generates load/store constraints for *dst = *src,
// where src and dst are slices or *arrays.
func (a *analysis) copyElems(cgn *cgnode, typ types.Type, dst, src ssa.Value) {
	tmp := a.addNodes(typ, "copy")
	sz := a.sizeof(typ)
	a.genLoad(cgn, tmp, src, 1, sz)
	a.genStore(cgn, dst, tmp, 1, sz)
}
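
// In effect (an illustrative sketch), for dst, src of type []*int this
// generates tmp = src[1] then dst[1] = tmp, where offset 1 skips the
// identity node of the underlying array object.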

// ---------- Constraint generation ----------

// genConv generates constraints for the conversion operation conv.
func (a *analysis) genConv(conv *ssa.Convert, cgn *cgnode) {
	res := a.valueNode(conv)
	if res == 0 {
		return // result is non-pointerlike
	}

	tSrc := conv.X.Type()
	tDst := conv.Type()

	switch utSrc := tSrc.Underlying().(type) {
	case *types.Slice:
		// []byte/[]rune -> string?
		return

	case *types.Pointer:
		// *T -> unsafe.Pointer?
		if tDst.Underlying() == tUnsafePtr {
			return // we don't model unsafe aliasing (unsound)
		}

	case *types.Basic:
		switch tDst.Underlying().(type) {
		case *types.Pointer:
			// Treat unsafe.Pointer->*T conversions like
			// new(T) and create an unaliased object.
			if utSrc == tUnsafePtr {
				obj := a.addNodes(mustDeref(tDst), "unsafe.Pointer conversion")
				a.endObject(obj, cgn, conv)
				a.addressOf(tDst, res, obj)
				return
			}

		case *types.Slice:
			// string -> []byte/[]rune (or named aliases)?
			if utSrc.Info()&types.IsString != 0 {
				obj := a.addNodes(sliceToArray(tDst), "convert")
				a.endObject(obj, cgn, conv)
				a.addressOf(tDst, res, obj)
				return
			}

		case *types.Basic:
			// All basic-to-basic type conversions are no-ops.
			// This includes uintptr<->unsafe.Pointer conversions,
			// which we (unsoundly) ignore.
			return
		}
	}

	panic(fmt.Sprintf("illegal *ssa.Convert %s -> %s: %s", tSrc, tDst, conv.Parent()))
}
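
// For example (an illustrative sketch), b := []byte(s) hits the
// Basic->Slice case above: it allocates a fresh array object w and
// generates b = &w, in effect treating the conversion like
// new([1]byte)[:].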

// genAppend generates constraints for a call to append.
func (a *analysis) genAppend(instr *ssa.Call, cgn *cgnode) {
	// Consider z = append(x, y). y is optional.
	// This may allocate a new [1]T array; call its object w.
	// We get the following constraints:
	//
	//	z = x
	//	z = &w
	//	*z = *y

	x := instr.Call.Args[0]

	z := instr
	a.copy(a.valueNode(z), a.valueNode(x), 1) // z = x

	if len(instr.Call.Args) == 1 {
		return // no allocation for z = append(x) or _ = append(x).
	}

	// TODO(adonovan): test append([]byte, ...string) []byte.

	y := instr.Call.Args[1]
	tArray := sliceToArray(instr.Call.Args[0].Type())

	w := a.nextNode()
	a.addNodes(tArray, "append")
	a.endObject(w, cgn, instr)

	a.copyElems(cgn, tArray.Elem(), z, y)        // *z = *y
	a.addressOf(instr.Type(), a.valueNode(z), w) // z = &w
}

// genBuiltinCall generates constraints for a call to a built-in.
func (a *analysis) genBuiltinCall(instr ssa.CallInstruction, cgn *cgnode) {
	call := instr.Common()
	switch call.Value.(*ssa.Builtin).Name() {
	case "append":
		// Safe cast: append cannot appear in a go or defer statement.
		a.genAppend(instr.(*ssa.Call), cgn)

	case "copy":
		tElem := call.Args[0].Type().Underlying().(*types.Slice).Elem()
		a.copyElems(cgn, tElem, call.Args[0], call.Args[1])

	case "panic":
		a.copy(a.panicNode, a.valueNode(call.Args[0]), 1)

	case "recover":
		if v := instr.Value(); v != nil {
			a.copy(a.valueNode(v), a.panicNode, 1)
		}

	case "print":
		// In the tests, the probe might be the sole reference
		// to its arg, so make sure we create nodes for it.
		if len(call.Args) > 0 {
			a.valueNode(call.Args[0])
		}

	case "ssa:wrapnilchk":
		a.copy(a.valueNode(instr.Value()), a.valueNode(call.Args[0]), 1)

	default:
		// No-ops: close len cap real imag complex print println delete.
	}
}

// shouldUseContext defines the context-sensitivity policy. It
// returns true if we should analyse all static calls to fn anew.
//
// Obviously this interface rather limits how much freedom we have to
// choose a policy. The current policy, rather arbitrarily, is true
// for intrinsics and accessor methods (actually: short, single-block,
// call-free functions). This is just a starting point.
func (a *analysis) shouldUseContext(fn *ssa.Function) bool {
	if a.findIntrinsic(fn) != nil {
		return true // treat intrinsics context-sensitively
	}
	if len(fn.Blocks) != 1 {
		return false // too expensive
	}
	blk := fn.Blocks[0]
	if len(blk.Instrs) > 10 {
		return false // too expensive
	}
	if fn.Synthetic != "" && (fn.Pkg == nil || fn != fn.Pkg.Func("init")) {
		return true // treat synthetic wrappers context-sensitively
	}
	for _, instr := range blk.Instrs {
		switch instr := instr.(type) {
		case ssa.CallInstruction:
			// Disallow function calls (except to built-ins)
			// because of the danger of unbounded recursion.
			if _, ok := instr.Common().Value.(*ssa.Builtin); !ok {
				return false
			}
		}
	}
	return true
}
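
// For example (an illustrative sketch), a getter such as
//
//	func (t *T) Name() string { return t.name }
//
// is short, single-block and call-free, so each static call to it is
// analysed with its own contour; larger functions get one shared
// contour for all callers.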

// genStaticCall generates constraints for a statically dispatched function call.
func (a *analysis) genStaticCall(caller *cgnode, site *callsite, call *ssa.CallCommon, result nodeid) {
	fn := call.StaticCallee()

	// Special cases for inlined intrinsics.
	switch fn {
	case a.runtimeSetFinalizer:
		// Inline SetFinalizer so the call appears direct.
		site.targets = a.addOneNode(tInvalid, "SetFinalizer.targets", nil)
		a.addConstraint(&runtimeSetFinalizerConstraint{
			targets: site.targets,
			x:       a.valueNode(call.Args[0]),
			f:       a.valueNode(call.Args[1]),
		})
		return

	case a.reflectValueCall:
		// Inline (reflect.Value).Call so the call appears direct.
		dotdotdot := false
		ret := reflectCallImpl(a, caller, site, a.valueNode(call.Args[0]), a.valueNode(call.Args[1]), dotdotdot)
		if result != 0 {
			a.addressOf(fn.Signature.Results().At(0).Type(), result, ret)
		}
		return
	}

	// Ascertain the context (contour/cgnode) for a particular call.
	var obj nodeid
	if a.shouldUseContext(fn) {
		obj = a.makeFunctionObject(fn, site) // new contour
	} else {
		obj = a.objectNode(nil, fn) // shared contour
	}
	a.callEdge(caller, site, obj)

	sig := call.Signature()

	// Copy receiver, if any.
	params := a.funcParams(obj)
	args := call.Args
	if sig.Recv() != nil {
		sz := a.sizeof(sig.Recv().Type())
		a.copy(params, a.valueNode(args[0]), sz)
		params += nodeid(sz)
		args = args[1:]
	}

	// Copy actual parameters into formal params block.
	// Must loop, since the actuals aren't contiguous.
	for i, arg := range args {
		sz := a.sizeof(sig.Params().At(i).Type())
		a.copy(params, a.valueNode(arg), sz)
		params += nodeid(sz)
	}

	// Copy formal results block to actual result.
	if result != 0 {
		a.copy(result, a.funcResults(obj), a.sizeof(sig.Results()))
	}
}
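
// For example (an illustrative sketch), a static call r := f(a, b)
// with the chosen contour obj generates
//
//	P-block(obj) = a, b	// one sized copy per actual parameter
//	r = R-block(obj)	// one sized copy of the results
//
// where the P- and R-blocks are located via funcParams/funcResults.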

// genDynamicCall generates constraints for a dynamic function call.
func (a *analysis) genDynamicCall(caller *cgnode, site *callsite, call *ssa.CallCommon, result nodeid) {
	// pts(targets) will be the set of possible call targets.
	site.targets = a.valueNode(call.Value)

	// We add dynamic closure rules that store the arguments into
	// the P-block and load the results from the R-block of each
	// function discovered in pts(targets).

	sig := call.Signature()
	var offset uint32 = 1 // P/R block starts at offset 1
	for i, arg := range call.Args {
		sz := a.sizeof(sig.Params().At(i).Type())
		a.genStore(caller, call.Value, a.valueNode(arg), offset, sz)
		offset += sz
	}
	if result != 0 {
		a.genLoad(caller, result, call.Value, offset, a.sizeof(sig.Results()))
	}
}
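
// For example (an illustrative sketch), for r := f(a) where f is a
// dynamic function value of type func(*int) *int, we generate
//
//	f[1] = a	// store the actual into each target's P-block
//	r = f[2]	// load each target's R-block
//
// offsets 1 and 2 skipping the function object's identity node and
// P-block respectively.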

// genInvoke generates constraints for a dynamic method invocation.
func (a *analysis) genInvoke(caller *cgnode, site *callsite, call *ssa.CallCommon, result nodeid) {
	if call.Value.Type() == a.reflectType {
		a.genInvokeReflectType(caller, site, call, result)
		return
	}

	sig := call.Signature()

	// Allocate a contiguous targets/params/results block for this call.
	block := a.nextNode()
	// pts(targets) will be the set of possible call targets
	site.targets = a.addOneNode(sig, "invoke.targets", nil)
	p := a.addNodes(sig.Params(), "invoke.params")
	r := a.addNodes(sig.Results(), "invoke.results")

	// Copy the actual parameters into the call's params block.
	for i, n := 0, sig.Params().Len(); i < n; i++ {
		sz := a.sizeof(sig.Params().At(i).Type())
		a.copy(p, a.valueNode(call.Args[i]), sz)
		p += nodeid(sz)
	}
	// Copy the call's results block to the actual results.
	if result != 0 {
		a.copy(result, r, a.sizeof(sig.Results()))
	}

	// We add a dynamic invoke constraint that will connect the
	// caller's and the callee's P/R blocks for each discovered
	// call target.
	a.addConstraint(&invokeConstraint{call.Method, a.valueNode(call.Value), block})
}

// genInvokeReflectType is a specialization of genInvoke where the
// receiver type is a reflect.Type, under the assumption that there
// can be at most one implementation of this interface, *reflect.rtype.
//
// (Though this may appear to be an instance of a pattern---method
// calls on interfaces known to have exactly one implementation---in
// practice it occurs rarely, so we special case for reflect.Type.)
//
// In effect we treat this:
//
//	var rt reflect.Type = ...
//	rt.F()
//
// as this:
//
//	rt.(*reflect.rtype).F()
func (a *analysis) genInvokeReflectType(caller *cgnode, site *callsite, call *ssa.CallCommon, result nodeid) {
	// Unpack receiver into rtype
	rtype := a.addOneNode(a.reflectRtypePtr, "rtype.recv", nil)
	recv := a.valueNode(call.Value)
	a.typeAssert(a.reflectRtypePtr, rtype, recv, true)

	// Look up the concrete method.
	fn := a.prog.LookupMethod(a.reflectRtypePtr, call.Method.Pkg(), call.Method.Name())

	obj := a.makeFunctionObject(fn, site) // new contour for this call
	a.callEdge(caller, site, obj)

	// From now on, it's essentially a static call, but little is
	// gained by factoring together the code for both cases.

	sig := fn.Signature // concrete method
	targets := a.addOneNode(sig, "call.targets", nil)
	a.addressOf(sig, targets, obj) // (a singleton)

	// Copy receiver.
	params := a.funcParams(obj)
	a.copy(params, rtype, 1)
	params++

	// Copy actual parameters into formal P-block.
	// Must loop, since the actuals aren't contiguous.
	for i, arg := range call.Args {
		sz := a.sizeof(sig.Params().At(i).Type())
		a.copy(params, a.valueNode(arg), sz)
		params += nodeid(sz)
	}

	// Copy formal R-block to actual R-block.
	if result != 0 {
		a.copy(result, a.funcResults(obj), a.sizeof(sig.Results()))
	}
}

// genCall generates constraints for call instruction instr.
func (a *analysis) genCall(caller *cgnode, instr ssa.CallInstruction) {
	call := instr.Common()

	// Intrinsic implementations of built-in functions.
	if _, ok := call.Value.(*ssa.Builtin); ok {
		a.genBuiltinCall(instr, caller)
		return
	}

	var result nodeid
	if v := instr.Value(); v != nil {
		result = a.valueNode(v)
	}

	site := &callsite{instr: instr}
	if call.StaticCallee() != nil {
		a.genStaticCall(caller, site, call, result)
	} else if call.IsInvoke() {
		a.genInvoke(caller, site, call, result)
	} else {
		a.genDynamicCall(caller, site, call, result)
	}

	caller.sites = append(caller.sites, site)

	if a.log != nil {
		// TODO(adonovan): debug: improve log message.
		fmt.Fprintf(a.log, "\t%s to targets %s from %s\n", site, site.targets, caller)
	}
}

// objectNode returns the object to which v points, if known.
// In other words, if the points-to set of v is a singleton, it
// returns the sole label, zero otherwise.
//
// We exploit this information to make the generated constraints less
// dynamic. For example, a complex load constraint can be replaced by
// a simple copy constraint when the sole destination is known a priori.
//
// Some SSA instructions always have singleton points-to sets:
//
//	Alloc, Function, Global, MakeChan, MakeClosure, MakeInterface, MakeMap, MakeSlice.
//
// Others may be singletons depending on their operands:
//
//	FreeVar, Const, Convert, FieldAddr, IndexAddr, Slice, SliceToArrayPointer.
//
// Idempotent. Objects are created as needed, possibly via recursion
// down the SSA value graph, e.g. IndexAddr(FieldAddr(Alloc)).
func (a *analysis) objectNode(cgn *cgnode, v ssa.Value) nodeid {
	switch v.(type) {
	case *ssa.Global, *ssa.Function, *ssa.Const, *ssa.FreeVar:
		// Global object.
		obj, ok := a.globalobj[v]
		if !ok {
			switch v := v.(type) {
			case *ssa.Global:
				obj = a.nextNode()
				a.addNodes(mustDeref(v.Type()), "global")
				a.endObject(obj, nil, v)

			case *ssa.Function:
				obj = a.makeFunctionObject(v, nil)

			case *ssa.Const:
				// not addressable

			case *ssa.FreeVar:
				// not addressable
			}

			if a.log != nil {
				fmt.Fprintf(a.log, "\tglobalobj[%s] = n%d\n", v, obj)
			}
			a.globalobj[v] = obj
		}
		return obj
	}

	// Local object.
	obj, ok := a.localobj[v]
	if !ok {
		switch v := v.(type) {
		case *ssa.Alloc:
			obj = a.nextNode()
			a.addNodes(mustDeref(v.Type()), "alloc")
			a.endObject(obj, cgn, v)

		case *ssa.MakeSlice:
			obj = a.nextNode()
			a.addNodes(sliceToArray(v.Type()), "makeslice")
			a.endObject(obj, cgn, v)

		case *ssa.MakeChan:
			obj = a.nextNode()
			a.addNodes(v.Type().Underlying().(*types.Chan).Elem(), "makechan")
			a.endObject(obj, cgn, v)

		case *ssa.MakeMap:
			obj = a.nextNode()
			tmap := v.Type().Underlying().(*types.Map)
			a.addNodes(tmap.Key(), "makemap.key")
			elem := a.addNodes(tmap.Elem(), "makemap.value")

			// To update the value field, MapUpdate
			// generates store-with-offset constraints which
			// the presolver can't model, so we must mark
			// those nodes indirect.
			for id, end := elem, elem+nodeid(a.sizeof(tmap.Elem())); id < end; id++ {
				a.mapValues = append(a.mapValues, id)
			}
			a.endObject(obj, cgn, v)

		case *ssa.MakeInterface:
			tConc := v.X.Type()
			obj = a.makeTagged(tConc, cgn, v)

			// Copy the value into it, if nontrivial.
			if x := a.valueNode(v.X); x != 0 {
				a.copy(obj+1, x, a.sizeof(tConc))
			}

		case *ssa.FieldAddr:
			if xobj := a.objectNode(cgn, v.X); xobj != 0 {
				obj = xobj + nodeid(a.offsetOf(mustDeref(v.X.Type()), v.Field))
			}

		case *ssa.IndexAddr:
			if xobj := a.objectNode(cgn, v.X); xobj != 0 {
				obj = xobj + 1
			}

		case *ssa.Slice:
			obj = a.objectNode(cgn, v.X)

		case *ssa.SliceToArrayPointer:
			// Going from a []T to a *[k]T for some k.
			// A slice []T is treated as if it were a *T pointer.
			obj = a.objectNode(cgn, v.X)

		case *ssa.Convert:
			// TODO(adonovan): opt: handle these cases too:
			// - unsafe.Pointer->*T conversion acts like Alloc
			// - string->[]byte/[]rune conversion acts like MakeSlice
		}

		if a.log != nil {
			fmt.Fprintf(a.log, "\tlocalobj[%s] = n%d\n", v.Name(), obj)
		}
		a.localobj[v] = obj
	}
	return obj
}
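
// For example (an illustrative sketch), in
//
//	t := new(struct{ x [4]*int })	// *ssa.Alloc
//	p := &t.x			// *ssa.FieldAddr
//	q := &p[i]			// *ssa.IndexAddr
//
// objectNode(q) recurses through IndexAddr(FieldAddr(Alloc)) and
// returns the node of the element block within the Alloc's object,
// so loads and stores through q can be pre-solved as simple copies.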

// genLoad generates constraints for result = *(ptr + offset).
func (a *analysis) genLoad(cgn *cgnode, result nodeid, ptr ssa.Value, offset, sizeof uint32) {
	if obj := a.objectNode(cgn, ptr); obj != 0 {
		// Pre-apply loadConstraint.solve().
		a.copy(result, obj+nodeid(offset), sizeof)
	} else {
		a.load(result, a.valueNode(ptr), offset, sizeof)
	}
}

// genOffsetAddr generates constraints for a 'v=ptr.field' (FieldAddr)
// or 'v=ptr[*]' (IndexAddr) instruction v.
func (a *analysis) genOffsetAddr(cgn *cgnode, v ssa.Value, ptr nodeid, offset uint32) {
	dst := a.valueNode(v)
	if obj := a.objectNode(cgn, v); obj != 0 {
		// Pre-apply offsetAddrConstraint.solve().
		a.addressOf(v.Type(), dst, obj)
	} else {
		a.offsetAddr(v.Type(), dst, ptr, offset)
	}
}

// genStore generates constraints for *(ptr + offset) = val.
func (a *analysis) genStore(cgn *cgnode, ptr ssa.Value, val nodeid, offset, sizeof uint32) {
	if obj := a.objectNode(cgn, ptr); obj != 0 {
		// Pre-apply storeConstraint.solve().
		a.copy(obj+nodeid(offset), val, sizeof)
	} else {
		a.store(a.valueNode(ptr), val, offset, sizeof)
	}
}

// genInstr generates constraints for instruction instr in context cgn.
func (a *analysis) genInstr(cgn *cgnode, instr ssa.Instruction) {
	if a.log != nil {
		var prefix string
		if val, ok := instr.(ssa.Value); ok {
			prefix = val.Name() + " = "
		}
		fmt.Fprintf(a.log, "; %s%s\n", prefix, instr)
	}

	switch instr := instr.(type) {
	case *ssa.DebugRef:
		// no-op.

	case *ssa.UnOp:
		switch instr.Op {
		case token.ARROW: // <-x
			// We can ignore instr.CommaOk because the node we're
			// altering is always at zero offset relative to instr.
			tElem := instr.X.Type().Underlying().(*types.Chan).Elem()
			a.genLoad(cgn, a.valueNode(instr), instr.X, 0, a.sizeof(tElem))

		case token.MUL: // *x
			a.genLoad(cgn, a.valueNode(instr), instr.X, 0, a.sizeof(instr.Type()))

		default:
			// NOT, SUB, XOR: no-op.
		}

	case *ssa.BinOp:
		// All no-ops.

	case ssa.CallInstruction: // *ssa.Call, *ssa.Go, *ssa.Defer
		a.genCall(cgn, instr)

	case *ssa.ChangeType:
		a.copy(a.valueNode(instr), a.valueNode(instr.X), 1)

	case *ssa.Convert:
		a.genConv(instr, cgn)

	case *ssa.Extract:
		a.copy(a.valueNode(instr),
			a.valueOffsetNode(instr.Tuple, instr.Index),
			a.sizeof(instr.Type()))

	case *ssa.FieldAddr:
		a.genOffsetAddr(cgn, instr, a.valueNode(instr.X),
			a.offsetOf(mustDeref(instr.X.Type()), instr.Field))

	case *ssa.IndexAddr:
		a.genOffsetAddr(cgn, instr, a.valueNode(instr.X), 1)

	case *ssa.Field:
		a.copy(a.valueNode(instr),
			a.valueOffsetNode(instr.X, instr.Field),
			a.sizeof(instr.Type()))

	case *ssa.Index:
		_, isstring := typeparams.CoreType(instr.X.Type()).(*types.Basic)
		if !isstring {
			a.copy(a.valueNode(instr), 1+a.valueNode(instr.X), a.sizeof(instr.Type()))
		}

	case *ssa.Select:
		recv := a.valueOffsetNode(instr, 2) // instr : (index, recvOk, recv0, ... recv_n-1)
		for _, st := range instr.States {
			elemSize := a.sizeof(st.Chan.Type().Underlying().(*types.Chan).Elem())
			switch st.Dir {
			case types.RecvOnly:
				a.genLoad(cgn, recv, st.Chan, 0, elemSize)
				recv += nodeid(elemSize)

			case types.SendOnly:
				a.genStore(cgn, st.Chan, a.valueNode(st.Send), 0, elemSize)
			}
		}

	case *ssa.Return:
		results := a.funcResults(cgn.obj)
		for _, r := range instr.Results {
			sz := a.sizeof(r.Type())
			a.copy(results, a.valueNode(r), sz)
			results += nodeid(sz)
		}

	case *ssa.Send:
		a.genStore(cgn, instr.Chan, a.valueNode(instr.X), 0, a.sizeof(instr.X.Type()))

	case *ssa.Store:
		a.genStore(cgn, instr.Addr, a.valueNode(instr.Val), 0, a.sizeof(instr.Val.Type()))

	case *ssa.Alloc, *ssa.MakeSlice, *ssa.MakeChan, *ssa.MakeMap, *ssa.MakeInterface:
		v := instr.(ssa.Value)
		a.addressOf(v.Type(), a.valueNode(v), a.objectNode(cgn, v))

	case *ssa.ChangeInterface:
		a.copy(a.valueNode(instr), a.valueNode(instr.X), 1)

	case *ssa.TypeAssert:
		a.typeAssert(instr.AssertedType, a.valueNode(instr), a.valueNode(instr.X), true)

	case *ssa.Slice:
		a.copy(a.valueNode(instr), a.valueNode(instr.X), 1)

	case *ssa.SliceToArrayPointer:
		// Going from a []T to a *[k]T (for some k) is a single `dst = src` constraint.
		// Both []T and *[k]T are modelled as an *IdArrayT where IdArrayT is the identity
		// node for an array of type T, i.e. `type IdArrayT struct{elem T}`.
		a.copy(a.valueNode(instr), a.valueNode(instr.X), 1)

	case *ssa.If, *ssa.Jump:
		// no-op.

	case *ssa.Phi:
		sz := a.sizeof(instr.Type())
		for _, e := range instr.Edges {
			a.copy(a.valueNode(instr), a.valueNode(e), sz)
		}

	case *ssa.MakeClosure:
		fn := instr.Fn.(*ssa.Function)
		a.copy(a.valueNode(instr), a.valueNode(fn), 1)
		// Free variables are treated like global variables.
		for i, b := range instr.Bindings {
			a.copy(a.valueNode(fn.FreeVars[i]), a.valueNode(b), a.sizeof(b.Type()))
		}

	case *ssa.RunDefers:
		// The analysis is flow insensitive, so we just "call"
		// defers as we encounter them.

	case *ssa.Range:
		// Do nothing. Next{Iter: *ssa.Range} handles this case.

	case *ssa.Next:
		if !instr.IsString {
			// Assumes that Next is always directly applied to a Range result
			// for a map.

			// Next results in a destination tuple (ok, dk, dv).
			// Recall a map is modeled as type *M where M = struct{sk K; sv V}.
			// Next copies from a src map struct{sk K; sv V} to a dst tuple (ok, dk, dv).
			//
			// When the key or value is a blank identifier in a range
			// statement, e.g.
			//	for _, v := range m { ... }
			// or
			//	for _, _ = range m { ... }
			// we skip copying from sk or sv as there is no use. dk and dv
			// will have Invalid types if they are blank identifiers. This
			// means that size((ok, dk, dv)) may differ from
			// 1 + size(struct{sk K; sv V}).
			//
			// We encode Next using one load of size sz from an offset in src osrc to an
			// offset in dst odst. There are 4 cases to consider:
			//
			//	case | odst       | osrc     | sz
			//	k, v | 1          | 0        | size(sk) + size(sv)
			//	k, _ | 1          | 0        | size(sk)
			//	_, v | 1+size(dk) | size(sk) | size(sv)
			//	_, _ | 1+size(dk) | size(sk) | 0

			// get the source key and value size. Note the source types
			// may be different than the 3-tuple types, but if this is the
			// case then the source is assignable to the destination.
			theMap := instr.Iter.(*ssa.Range).X
			tMap := theMap.Type().Underlying().(*types.Map)

			sksize := a.sizeof(tMap.Key())
			svsize := a.sizeof(tMap.Elem())

			// get the key size of the destination tuple.
			tTuple := instr.Type().(*types.Tuple)
			dksize := a.sizeof(tTuple.At(1).Type())

			// Load from the map's (k,v) into the tuple's (ok, k, v).
			osrc := uint32(0) // offset within map object
			odst := uint32(1) // offset within tuple (initially just after 'ok bool')
			sz := uint32(0)   // amount to copy

			// Is key valid?
			if tTuple.At(1).Type() != tInvalid {
				sz += sksize
			} else {
				odst += dksize
				osrc += sksize
			}

			// Is value valid?
			if tTuple.At(2).Type() != tInvalid {
				sz += svsize
			}

			a.genLoad(cgn, a.valueNode(instr)+nodeid(odst), theMap, osrc, sz)
		}

	case *ssa.Lookup:
		if tMap, ok := instr.X.Type().Underlying().(*types.Map); ok {
			// CommaOk can be ignored: field 0 is a no-op.
			ksize := a.sizeof(tMap.Key())
			vsize := a.sizeof(tMap.Elem())
			a.genLoad(cgn, a.valueNode(instr), instr.X, ksize, vsize)
		}

	case *ssa.MapUpdate:
		tmap := instr.Map.Type().Underlying().(*types.Map)
		ksize := a.sizeof(tmap.Key())
		vsize := a.sizeof(tmap.Elem())
		a.genStore(cgn, instr.Map, a.valueNode(instr.Key), 0, ksize)
		a.genStore(cgn, instr.Map, a.valueNode(instr.Value), ksize, vsize)

	case *ssa.Panic:
		a.copy(a.panicNode, a.valueNode(instr.X), 1)

	default:
		panic(fmt.Sprintf("unimplemented: %T", instr))
	}
}

func (a *analysis) makeCGNode(fn *ssa.Function, obj nodeid, callersite *callsite) *cgnode {
	cgn := &cgnode{fn: fn, obj: obj, callersite: callersite}
	a.cgnodes = append(a.cgnodes, cgn)
	return cgn
}

// genRootCalls generates the synthetic root of the callgraph and the
// initial calls from it to the analysis scope, such as main, a test
// or a library.
func (a *analysis) genRootCalls() *cgnode {
	r := a.prog.NewFunction("<root>", new(types.Signature), "root of callgraph")
	root := a.makeCGNode(r, 0, nil)

	// TODO(adonovan): make an ssa utility to construct an actual
	// root function so we don't need to special-case site-less
	// call edges.

	// For each main package, call main.init(), main.main().
	for _, mainPkg := range a.config.Mains {
		main := mainPkg.Func("main")
		if main == nil {
			panic(fmt.Sprintf("%s has no main function", mainPkg))
		}

		targets := a.addOneNode(main.Signature, "root.targets", nil)
		site := &callsite{targets: targets}
		root.sites = append(root.sites, site)
		for _, fn := range [2]*ssa.Function{mainPkg.Func("init"), main} {
			if a.log != nil {
				fmt.Fprintf(a.log, "\troot call to %s:\n", fn)
			}
			a.copy(targets, a.valueNode(fn), 1)
		}
	}

	return root
}

// genFunc generates constraints for function fn.
func (a *analysis) genFunc(cgn *cgnode) {
	fn := cgn.fn

	impl := a.findIntrinsic(fn)

	if a.log != nil {
		fmt.Fprintf(a.log, "\n\n==== Generating constraints for %s, %s\n", cgn, cgn.contour())

		// Hack: don't display body if intrinsic.
		if impl != nil {
			fn2 := *cgn.fn // copy
			fn2.Locals = nil
			fn2.Blocks = nil
			fn2.WriteTo(a.log)
		} else {
			cgn.fn.WriteTo(a.log)
		}
	}

	if impl != nil {
		impl(a, cgn)
		return
	}

	if fn.Blocks == nil {
		// External function with no intrinsic treatment.
		// We'll warn about calls to such functions at the end.
		return
	}

	if fn.TypeParams().Len() > 0 && len(fn.TypeArgs()) == 0 {
		// Body of generic function.
		// We'll warn about calls to such functions at the end.
		return
	}

	if strings.HasPrefix(fn.Synthetic, "instantiation wrapper ") {
		// Instantiation wrapper of a generic function.
		// These may contain type coercions which are not currently supported.
		// We'll warn about calls to such functions at the end.
		return
	}

	if a.log != nil {
		fmt.Fprintln(a.log, "; Creating nodes for local values")
	}

	a.localval = make(map[ssa.Value]nodeid)
	a.localobj = make(map[ssa.Value]nodeid)

	// The value nodes for the params are in the func object block.
	params := a.funcParams(cgn.obj)
	for _, p := range fn.Params {
		a.setValueNode(p, params, cgn)
		params += nodeid(a.sizeof(p.Type()))
	}

	// Free variables have global cardinality:
	// the outer function sets them with MakeClosure;
	// the inner function accesses them with FreeVar.
	//
	// TODO(adonovan): treat free vars context-sensitively.

	// Create value nodes for all value instructions
	// since SSA may contain forward references.
	var space [10]*ssa.Value
	for _, b := range fn.Blocks {
		for _, instr := range b.Instrs {
			switch instr := instr.(type) {
			case *ssa.Range:
				// do nothing: it has a funky type,
				// and *ssa.Next does all the work.

			case ssa.Value:
				var comment string
				if a.log != nil {
					comment = instr.Name()
				}
				id := a.addNodes(instr.Type(), comment)
				a.setValueNode(instr, id, cgn)
			}

			// Record all address-taken functions (for presolver).
			rands := instr.Operands(space[:0])
			if call, ok := instr.(ssa.CallInstruction); ok && !call.Common().IsInvoke() {
				// Skip CallCommon.Value in "call" mode.
				// TODO(adonovan): fix: relies on unspecified ordering. Specify it.
				rands = rands[1:]
			}
			for _, rand := range rands {
				if atf, ok := (*rand).(*ssa.Function); ok {
					a.atFuncs[atf] = true
				}
			}
		}
	}

	// Generate constraints for instructions.
	for _, b := range fn.Blocks {
		for _, instr := range b.Instrs {
			a.genInstr(cgn, instr)
		}
	}

	a.localval = nil
	a.localobj = nil
}

// genMethodsOf generates nodes and constraints for all methods of type T.
func (a *analysis) genMethodsOf(T types.Type) {
	itf := isInterface(T)

	// TODO(adonovan): can we skip this entirely if itf is true?
	// I think so, but the answer may depend on reflection.
	mset := a.prog.MethodSets.MethodSet(T)
	for i, n := 0, mset.Len(); i < n; i++ {
		m := a.prog.MethodValue(mset.At(i))
		a.valueNode(m)

		if !itf {
			// Methods of concrete types are address-taken functions.
			a.atFuncs[m] = true
		}
	}
}

// generate generates offline constraints for the entire program.
func (a *analysis) generate() {
	start("Constraint generation")
	if a.log != nil {
		fmt.Fprintln(a.log, "==== Generating constraints")
	}

	// Create a dummy node since we use the nodeid 0 for
	// non-pointerlike variables.
	a.addNodes(tInvalid, "(zero)")

	// Create the global node for panic values.
	a.panicNode = a.addNodes(tEface, "panic")

	// Create nodes and constraints for all methods of reflect.rtype.
	// (Shared contours are used by dynamic calls to reflect.Type
	// methods---typically just String().)
	if rtype := a.reflectRtypePtr; rtype != nil {
		a.genMethodsOf(rtype)
	}

	root := a.genRootCalls()

	if a.config.BuildCallGraph {
		a.result.CallGraph = callgraph.New(root.fn)
	}

	// Create nodes and constraints for all methods of all types
	// that are dynamically accessible via reflection or interfaces.
	for _, T := range a.prog.RuntimeTypes() {
		a.genMethodsOf(T)
	}

	// Generate constraints for functions as they become reachable
	// from the roots. (No constraints are generated for functions
	// that are dead in this analysis scope.)
	for len(a.genq) > 0 {
		cgn := a.genq[0]
		a.genq = a.genq[1:]
		a.genFunc(cgn)
	}

	// The runtime magically allocates os.Args; so should we.
	if os := a.prog.ImportedPackage("os"); os != nil {
		// In effect: os.Args = new([1]string)[:]
		T := types.NewSlice(types.Typ[types.String])
		obj := a.addNodes(sliceToArray(T), "<command-line args>")
		a.endObject(obj, nil, "<command-line args>")
		a.addressOf(T, a.objectNode(nil, os.Var("Args")), obj)
	}

	// Discard generation state, to avoid confusion after node renumbering.
	a.panicNode = 0
	a.globalval = nil
	a.localval = nil
	a.localobj = nil

	stop("Constraint generation")
}