summary refs log tree commit diff stats
path: root/compiler/patterns.nim
blob: af259c916b9feff9da72e30aad9a96ba81d0529f (plain) (blame)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
#
#
#           The Nimrod Compiler
#        (c) Copyright 2012 Andreas Rumpf
#
#    See the file "copying.txt", included in this
#    distribution, for details about the copyright.
#

## This module implements the pattern matching features for term rewriting
## macro support.

import
  ast, astalgo, types, semdata, sigmatch, msgs, idents, aliases, parampatterns,
  trees

type
  # State threaded through one attempt to match a term-rewriting pattern
  # against an AST. One instance lives per applyRule invocation.
  TPatternContext = object
    owner: PSym          # the routine that declared the pattern; its params are the pattern variables
    mapping: seq[PNode]  # maps formal parameters to nodes
    formals: int         # number of formal parameters (used to size 'mapping' lazily)
    c: PContext          # semantic-checking context, needed for type matching
    subMatch: bool       # subnode matches are special
  # NOTE(review): legacy 'var'-alias typedef so helpers mutate the caller's
  # context in place; modern code would pass 'var TPatternContext' directly.
  PPatternContext = var TPatternContext

proc getLazy(c: PPatternContext, sym: PSym): PNode =
  ## Returns the node currently bound to the formal parameter `sym`,
  ## or nil when nothing is bound (including when the mapping table
  ## has not been allocated yet).
  if isNil(c.mapping):
    result = nil
  else:
    result = c.mapping[sym.position]

proc putLazy(c: PPatternContext, sym: PSym, n: PNode) =
  ## Binds `n` to the formal parameter `sym`, allocating the mapping
  ## table on first use (hence "lazy").
  if isNil(c.mapping):
    newSeq(c.mapping, c.formals)
  c.mapping[sym.position] = n

proc matches(c: PPatternContext, p, n: PNode): bool

proc canonKind(n: PNode): TNodeKind =
  ## Node-kind canonicalization for pattern matching: every call-like
  ## kind collapses to nkCall, every string-literal kind to nkStrLit,
  ## and a fast assignment counts as an ordinary assignment.
  result = n.kind
  if result in nkCallKinds:
    result = nkCall
  elif result in {nkStrLit..nkTripleStrLit}:
    result = nkStrLit
  elif result == nkFastAsgn:
    result = nkAsgn

proc sameKinds(a, b: PNode): bool {.inline.} =
  ## True if both nodes have the same kind, either literally or after
  ## canonicalization (so e.g. any two call kinds compare equal).
  if a.kind == b.kind:
    result = true
  else:
    result = a.canonKind == b.canonKind

proc sameTrees(a, b: PNode): bool =
  ## Structural equality of two ASTs, modulo canonical node kinds.
  ## Leaves compare by their payload; inner nodes recurse over the sons.
  result = false
  if not sameKinds(a, b): return
  case a.kind
  of nkSym: result = a.sym == b.sym
  of nkIdent: result = a.ident.id == b.ident.id
  of nkCharLit..nkInt64Lit: result = a.intVal == b.intVal
  of nkFloatLit..nkFloat64Lit: result = a.floatVal == b.floatVal
  of nkStrLit..nkTripleStrLit: result = a.strVal == b.strVal
  of nkEmpty, nkNilLit: result = true
  of nkType: result = sameTypeOrNil(a.typ, b.typ)
  else:
    # inner node: equal arity and pairwise-equal children required
    if sonsLen(a) != sonsLen(b): return
    for i in 0 .. sonsLen(a)-1:
      if not sameTrees(a.sons[i], b.sons[i]): return
    result = true

proc inSymChoice(sc, x: PNode): bool =
  ## Is `x`'s symbol covered by the sym-choice node `sc`?
  ## A closed choice requires one of its exact symbols; an open choice is
  ## satisfied by any symbol with the same name.
  case sc.kind
  of nkClosedSymChoice:
    result = false
    for i in 0 .. sc.len-1:
      if sc.sons[i].sym == x.sym:
        result = true
        break
  of nkOpenSymChoice:
    # same name suffices for open sym choices!
    result = sc.sons[0].sym.name.id == x.sym.name.id
  else:
    result = false
  
proc checkTypes(c: PPatternContext, p: PSym, n: PNode): bool =
  ## Does the candidate node `n` satisfy the formal parameter `p`?
  ## The cheap node-kind constraint is tested before the (expensive)
  ## type match via sigmatch.
  if p.typ.constraint != nil and not matchNodeKinds(p.typ.constraint, n):
    return false
  if isNil(n.typ):
    # untyped nodes only match 'empty' or statement-like formals
    result = p.typ.kind in {tyEmpty, tyStmt}
  else:
    result = sigmatch.argtypeMatches(c.c, p.typ, n.typ)

proc isPatternParam(c: PPatternContext, p: PNode): bool {.inline.} =
  ## A pattern node is a pattern *variable* iff it is a symbol of kind
  ## skParam owned by the routine that declared the pattern.
  if p.kind != nkSym:
    result = false
  else:
    result = p.sym.kind == skParam and p.sym.owner == c.owner

proc matchChoice(c: PPatternContext, p, n: PNode): bool =
  ## Implements the `|` pattern operator: succeeds when any alternative
  ## (sons 1..) matches `n`.
  result = false
  var i = 1
  while i < p.len:
    if matches(c, p.sons[i], n):
      result = true
      break
    inc i

proc bindOrCheck(c: PPatternContext, param: PSym, n: PNode): bool =
  ## Unification step: the first occurrence of `param` binds it to `n`
  ## (after a type check); any later occurrence only succeeds when the
  ## new tree is structurally equal to the bound one.
  let bound = getLazy(c, param)
  if bound != nil:
    # check if we got the same pattern (already unified):
    result = sameTrees(bound, n)
  else:
    # nkArgList is produced internally (varargs capture) and is always
    # acceptable without a type check
    if n.kind == nkArgList or checkTypes(c, param, n):
      putLazy(c, param, n)
      result = true

proc gather(c: PPatternContext, param: PSym, n: PNode) =
  ## Appends `n` to the nkArgList accumulated for `param`, creating the
  ## list on the first occurrence.
  let acc = getLazy(c, param)
  if acc != nil and acc.kind == nkArgList:
    acc.add(n)
  else:
    var fresh = newNodeI(nkArgList, n.info, 1)
    fresh.sons[0] = n
    putLazy(c, param, fresh)

proc matchNested(c: PPatternContext, p, n: PNode, rpn: bool): bool =
  # match ``op * param`` or ``op *| param``
  # `p` is the nkPattern node: sons[1] is the operator pattern, sons[2] the
  # collecting parameter. `rpn` (used by the `**` operator) additionally
  # records the operator node itself, yielding reverse-polish order.
  proc matchStarAux(c: PPatternContext, op, n, arglist: PNode,
                    rpn: bool): bool =
    result = true
    if n.kind in nkCallKinds and matches(c, op.sons[1], n.sons[0]):
      # same operator applied again: recurse into every argument
      for i in 1..sonsLen(n)-1:
        if not matchStarAux(c, op, n[i], arglist, rpn): return false
      if rpn: arglist.add(n.sons[0])
    elif n.kind == nkHiddenStdConv and n.sons[1].kind == nkBracket:
      # look through an implicit array conversion (varargs construction)
      let n = n.sons[1]
      for i in 0.. <n.len: 
        if not matchStarAux(c, op, n[i], arglist, rpn): return false
    elif checkTypes(c, p.sons[2].sym, n):
      # a leaf operand: collect it if it type-checks against the parameter
      add(arglist, n)
    else:
      result = false
    
  if n.kind notin nkCallKinds: return false
  if matches(c, p.sons[1], n.sons[0]):
    # top-level operator matched: flatten the nested applications into
    # one nkArgList and bind it to the collecting parameter
    var arglist = newNodeI(nkArgList, n.info)
    if matchStarAux(c, p, n, arglist, rpn):
      result = bindOrCheck(c, p.sons[2].sym, arglist)

proc matches(c: PPatternContext, p, n: PNode): bool =
  ## Core matcher: does AST `n` match pattern `p`, binding pattern
  ## parameters along the way?
  # hidden conversions (?)
  if isPatternParam(c, p):
    # a pattern variable: bind it (or check the existing binding)
    result = bindOrCheck(c, p.sym, n)
  elif n.kind == nkSym and p.kind == nkIdent:
    # an unresolved identifier in the pattern matches any symbol of that name
    result = p.ident.id == n.sym.name.id
  elif n.kind == nkSym and inSymChoice(p, n):
    result = true
  elif n.kind == nkSym and n.sym.kind == skConst:
    # try both: the symbol itself, then its constant value
    if p.kind == nkSym: result = p.sym == n.sym
    elif matches(c, p, n.sym.ast): result = true
  elif p.kind == nkPattern:
    # pattern operators: | *
    let opr = p.sons[0].ident.s
    case opr
    of "|": result = matchChoice(c, p, n)
    of "*": result = matchNested(c, p, n, rpn=false)
    of "**": result = matchNested(c, p, n, rpn=true)
    of "~": result = not matches(c, p.sons[1], n)
    else: InternalError(p.info, "invalid pattern")
    # template {add(a, `&` * b)}(a: string{noalias}, b: varargs[string]) = 
    #   add(a, b)
  elif p.kind == nkCurlyExpr:
    # pattern{param} or pattern{`prefix` op}: match the inner pattern,
    # then bind/gather the matched node
    if p.sons[1].kind == nkPrefix:
      if matches(c, p.sons[0], n):
        gather(c, p.sons[1].sons[1].sym, n)
        result = true
    else:
      assert isPatternParam(c, p.sons[1])
      if matches(c, p.sons[0], n):
        result = bindOrCheck(c, p.sons[1].sym, n)
  elif sameKinds(p, n):
    # structural match: leaves by payload, inner nodes recursively
    case p.kind
    of nkSym: result = p.sym == n.sym
    of nkIdent: result = p.ident.id == n.ident.id
    of nkCharLit..nkInt64Lit: result = p.intVal == n.intVal
    of nkFloatLit..nkFloat64Lit: result = p.floatVal == n.floatVal
    of nkStrLit..nkTripleStrLit: result = p.strVal == n.strVal
    of nkEmpty, nkNilLit, nkType:
      result = true
    else:
      var plen = sonsLen(p)
      # special rule for p(X) ~ f(...); this also works for stuff like
      # partial case statements, etc! - Not really ... :-/
      let v = lastSon(p)
      if isPatternParam(c, v) and v.sym.typ.kind == tyVarargs:
        # trailing varargs parameter swallows the surplus arguments
        var arglist: PNode
        if plen <= sonsLen(n):
          # match the fixed prefix first
          for i in countup(0, plen - 2):
            if not matches(c, p.sons[i], n.sons[i]): return
          if plen == sonsLen(n) and lastSon(n).kind == nkHiddenStdConv and
              lastSon(n).sons[1].kind == nkBracket:
            # unpack varargs:
            let n = lastSon(n).sons[1]
            arglist = newNodeI(nkArgList, n.info, n.len)
            for i in 0.. <n.len: arglist.sons[i] = n.sons[i]
          else:
            arglist = newNodeI(nkArgList, n.info, sonsLen(n) - plen + 1)
            # f(1, 2, 3)
            # p(X)
            for i in countup(0, sonsLen(n) - plen):
              arglist.sons[i] = n.sons[i + plen - 1]
          return bindOrCheck(c, v.sym, arglist)
        elif plen-1 == sonsLen(n):
          # zero surplus arguments: bind an empty arg list
          for i in countup(0, plen - 2):
            if not matches(c, p.sons[i], n.sons[i]): return
          arglist = newNodeI(nkArgList, n.info)
          return bindOrCheck(c, v.sym, arglist)
      if plen == sonsLen(n):
        for i in countup(0, sonsLen(p) - 1):
          if not matches(c, p.sons[i], n.sons[i]): return
        result = true

proc matchStmtList(c: PPatternContext, p, n: PNode): PNode =
  ## Matches a statement-list pattern against `n`. When the pattern is a
  ## shorter nkStmtList than `n`, it may match a contiguous sub-range; the
  ## result is then a 3-son nkStmtList [prefix, match, suffix] and
  ## c.subMatch is set. Otherwise a plain match returns `n` itself.
  proc matchRange(c: PPatternContext, p, n: PNode, i: int): bool =
    for j in 0 .. <p.len:
      if not matches(c, p.sons[j], n.sons[i+j]):
        # we need to undo any bindings:
        if not isNil(c.mapping): c.mapping = nil
        return false
    result = true
  
  if p.kind == nkStmtList and n.kind == p.kind and p.len < n.len:
    let n = flattenStmts(n)
    # no need to flatten 'p' here as that has already been done
    # slide the pattern window over 'n'; first hit wins
    for i in 0 .. n.len - p.len:
      if matchRange(c, p, n, i):
        c.subMatch = true
        result = newNodeI(nkStmtList, n.info, 3)
        result.sons[0] = extractRange(nkStmtList, n, 0, i-1)
        result.sons[1] = extractRange(nkStmtList, n, i, i+p.len-1)
        result.sons[2] = extractRange(nkStmtList, n, i+p.len, n.len-1)
        break
  elif matches(c, p, n):
    result = n

proc aliasAnalysisRequested(params: PNode): bool =
  ## True when any formal parameter carries an alias/noalias constraint,
  ## which forces the (expensive) alias analysis after a match.
  result = false
  if params.len < 2: return
  for i in 1 .. params.len-1:
    if whichAlias(params.sons[i].sym) != aqNone:
      return true

proc addToArgList(result, n: PNode) =
  ## Adds `n` to the flat argument list used by the alias analysis.
  ## nkArgList wrappers are spliced in element-wise; statement-typed
  ## nodes are irrelevant for aliasing and skipped.
  if n.typ == nil or n.typ.kind == tyStmt: return
  if n.kind == nkArgList:
    for i in 0 .. n.len-1:
      result.add(n.sons[i])
  else:
    result.add(n)

proc applyRule*(c: PContext, s: PSym, n: PNode): PNode =
  ## Tries to apply the term-rewriting rule `s` to the AST `n`.
  ## Returns a tree to semcheck if the rule triggered; nil otherwise.
  ## For a sub-range match inside a statement list the result is the
  ## 3-son wrapper produced by matchStmtList with the middle replaced
  ## by the rewritten call.
  var ctx: TPatternContext
  ctx.owner = s
  ctx.c = c
  ctx.formals = sonsLen(s.typ)-1
  var m = matchStmtList(ctx, s.ast.sons[patternPos], n)
  if isNil(m): return nil
  # each parameter should have been bound; we simply setup a call and
  # let semantic checking deal with the rest :-)
  result = newNodeI(nkCall, n.info)
  result.add(newSymNode(s, n.info))
  let params = s.typ.n
  let requiresAA = aliasAnalysisRequested(params)
  var args: PNode
  if requiresAA:
    args = newNodeI(nkArgList, n.info)
  for i in 1 .. < params.len:
    let param = params.sons[i].sym
    let x = GetLazy(ctx, param)
    # couldn't bind parameter:
    if isNil(x): return nil
    result.add(x)
    # BUGFIX: collect the *bound argument* 'x' for alias analysis; the
    # old code passed the whole matched expression 'n', so the alias
    # constraints were checked against the wrong nodes.
    if requiresAA: addToArgList(args, x)
  # perform alias analysis here:
  if requiresAA:
    for i in 1 .. < params.len:
      var rs = result.sons[i]
      let param = params.sons[i].sym
      case whichAlias(param)
      of aqNone: nil
      of aqShouldAlias:
        # it suffices that it aliases for sure with *some* other param:
        var ok = false
        for arg in items(args):
          if arg != rs and aliases.isPartOf(rs, arg) == arYes:
            ok = true
            break
        # constraint not fulfilled: the rule does not apply
        if not ok: return nil
      of aqNoAlias:
        # it MUST not alias with any other param:
        var ok = true
        for arg in items(args):
          if arg != rs and aliases.isPartOf(rs, arg) != arNo:
            ok = false
            break
        # constraint not fulfilled: the rule does not apply
        if not ok: return nil

  markUsed(n, s)
  if ctx.subMatch:
    # splice the rewritten call back between the unmatched prefix/suffix
    assert m.len == 3
    m.sons[1] = result
    result = m
s="p">, x, y: PNode) = var hasCase = isCaseObj(t.n) var obj = t while obj.len > 0 and obj[0] != nil: obj = skipTypes(obj[0], abstractPtrs) hasCase = hasCase or isCaseObj(obj.n) if hasCase and c.kind in {attachedAsgn, attachedDeepCopy}: # assignment for case objects is complex, we do: # =destroy(dest) # wasMoved(dest) # for every field: # `=` dest.field, src.field # ^ this is what we used to do, but for 'result = result.sons[0]' it # destroys 'result' too early. # So this is what we really need to do: # let blob {.cursor.} = dest # remembers the old dest.kind # wasMoved(dest) # dest.kind = src.kind # for every field (dependent on dest.kind): # `=` dest.field, src.field # =destroy(blob) var dummy = newSym(skTemp, getIdent(c.g.cache, lowerings.genPrefix), nextSymId c.idgen, c.fn, c.info) dummy.typ = y.typ if ccgIntroducedPtr(c.g.config, dummy, y.typ): # Because of potential aliasing when the src param is passed by ref, we need to check for equality here, # because the wasMoved(dest) call would zero out src, if dest aliases src. 
var cond = newTree(nkCall, newSymNode(c.g.getSysMagic(c.info, "==", mEqRef)), newTreeIT(nkAddr, c.info, makePtrType(c.fn, x.typ, c.idgen), x), newTreeIT(nkAddr, c.info, makePtrType(c.fn, y.typ, c.idgen), y)) cond.typ = getSysType(c.g, x.info, tyBool) body.add genIf(c, cond, newTreeI(nkReturnStmt, c.info, newNodeI(nkEmpty, c.info))) var temp = newSym(skTemp, getIdent(c.g.cache, lowerings.genPrefix), nextSymId c.idgen, c.fn, c.info) temp.typ = x.typ incl(temp.flags, sfFromGeneric) var v = newNodeI(nkVarSection, c.info) let blob = newSymNode(temp) v.addVar(blob, x) body.add v #body.add newAsgnStmt(blob, x) var wasMovedCall = newNodeI(nkCall, c.info) wasMovedCall.add(newSymNode(createMagic(c.g, c.idgen, "wasMoved", mWasMoved))) wasMovedCall.add x # mWasMoved does not take the address body.add wasMovedCall fillBodyObjTImpl(c, t, body, x, y) when false: # does not work yet due to phase-ordering problems: assert t.destructor != nil body.add destructorCall(c.g, t.destructor, blob) let prevKind = c.kind c.kind = attachedDestructor fillBodyObjTImpl(c, t, body, blob, y) c.kind = prevKind else: fillBodyObjTImpl(c, t, body, x, y) proc boolLit*(g: ModuleGraph; info: TLineInfo; value: bool): PNode = result = newIntLit(g, info, ord value) result.typ = getSysType(g, info, tyBool) proc getCycleParam(c: TLiftCtx): PNode = assert c.kind == attachedAsgn if c.fn.typ.len == 4: result = c.fn.typ.n.lastSon assert result.kind == nkSym assert result.sym.name.s == "cyclic" else: result = boolLit(c.g, c.info, true) proc newHookCall(c: var TLiftCtx; op: PSym; x, y: PNode): PNode = #if sfError in op.flags: # localError(c.config, x.info, "usage of '$1' is a user-defined error" % op.name.s) result = newNodeI(nkCall, x.info) result.add newSymNode(op) if sfNeverRaises notin op.flags: c.canRaise = true if op.typ.sons[1].kind == tyVar: result.add genAddr(c, x) else: result.add x if y != nil: result.add y if op.typ.len == 4: assert y != nil if c.fn.typ.len == 4: result.add getCycleParam(c) else: # 
assume the worst: A cycle is created: result.add boolLit(c.g, y.info, true) proc newOpCall(c: var TLiftCtx; op: PSym; x: PNode): PNode = result = newNodeIT(nkCall, x.info, op.typ[0]) result.add(newSymNode(op)) result.add x if sfNeverRaises notin op.flags: c.canRaise = true proc newDeepCopyCall(c: var TLiftCtx; op: PSym; x, y: PNode): PNode = result = newAsgnStmt(x, newOpCall(c, op, y)) proc usesBuiltinArc(t: PType): bool = proc wrap(t: PType): bool {.nimcall.} = ast.isGCedMem(t) result = types.searchTypeFor(t, wrap) proc useNoGc(c: TLiftCtx; t: PType): bool {.inline.} = result = optSeqDestructors in c.g.config.globalOptions and ({tfHasGCedMem, tfHasOwned} * t.flags != {} or usesBuiltinArc(t)) proc requiresDestructor(c: TLiftCtx; t: PType): bool {.inline.} = result = optSeqDestructors in c.g.config.globalOptions and containsGarbageCollectedRef(t) proc instantiateGeneric(c: var TLiftCtx; op: PSym; t, typeInst: PType): PSym = if c.c != nil and typeInst != nil: result = c.c.instTypeBoundOp(c.c, op, typeInst, c.info, attachedAsgn, 1) else: localError(c.g.config, c.info, "cannot generate destructor for generic type: " & typeToString(t)) result = nil proc considerAsgnOrSink(c: var TLiftCtx; t: PType; body, x, y: PNode; field: var PSym): bool = if optSeqDestructors in c.g.config.globalOptions: var op = field let destructorOverriden = destructorOverriden(c.g, t) if op != nil and op != c.fn and (sfOverriden in op.flags or destructorOverriden): if sfError in op.flags: incl c.fn.flags, sfError #else: # markUsed(c.g.config, c.info, op, c.g.usageSym) onUse(c.info, op) body.add newHookCall(c, op, x, y) result = true elif op == nil and destructorOverriden: op = produceSym(c.g, c.c, t, c.kind, c.info, c.idgen) body.add newHookCall(c, op, x, y) result = true elif tfHasAsgn in t.flags: var op: PSym if sameType(t, c.asgnForType): # generate recursive call: if c.recurse: op = c.fn else: c.recurse = true return false else: op = field if op == nil: op = produceSym(c.g, c.c, t, c.kind, 
c.info, c.idgen) if sfError in op.flags: incl c.fn.flags, sfError #else: # markUsed(c.g.config, c.info, op, c.g.usageSym) onUse(c.info, op) # We also now do generic instantiations in the destructor lifting pass: if op.ast.isGenericRoutine: op = instantiateGeneric(c, op, t, t.typeInst) field = op #echo "trying to use ", op.ast #echo "for ", op.name.s, " " #debug(t) #return false assert op.ast[genericParamsPos].kind == nkEmpty body.add newHookCall(c, op, x, y) result = true proc addDestructorCall(c: var TLiftCtx; orig: PType; body, x: PNode) = let t = orig.skipTypes(abstractInst - {tyDistinct}) var op = t.destructor if op != nil and sfOverriden in op.flags: if op.ast.isGenericRoutine: # patch generic destructor: op = instantiateGeneric(c, op, t, t.typeInst) setAttachedOp(c.g, c.idgen.module, t, attachedDestructor, op) if op == nil and (useNoGc(c, t) or requiresDestructor(c, t)): op = produceSym(c.g, c.c, t, attachedDestructor, c.info, c.idgen) doAssert op != nil doAssert op == t.destructor if op != nil: #markUsed(c.g.config, c.info, op, c.g.usageSym) onUse(c.info, op) body.add destructorCall(c, op, x) elif useNoGc(c, t): internalError(c.g.config, c.info, "type-bound operator could not be resolved") proc considerUserDefinedOp(c: var TLiftCtx; t: PType; body, x, y: PNode): bool = case c.kind of attachedDestructor: var op = t.destructor if op != nil and sfOverriden in op.flags: if op.ast.isGenericRoutine: # patch generic destructor: op = instantiateGeneric(c, op, t, t.typeInst) setAttachedOp(c.g, c.idgen.module, t, attachedDestructor, op) #markUsed(c.g.config, c.info, op, c.g.usageSym) onUse(c.info, op) body.add destructorCall(c, op, x) result = true #result = addDestructorCall(c, t, body, x) of attachedAsgn, attachedSink, attachedTrace: var op = getAttachedOp(c.g, t, c.kind) result = considerAsgnOrSink(c, t, body, x, y, op) if op != nil: setAttachedOp(c.g, c.idgen.module, t, c.kind, op) of attachedDispose: var op = getAttachedOp(c.g, t, c.kind) result = 
considerAsgnOrSink(c, t, body, x, nil, op) if op != nil: setAttachedOp(c.g, c.idgen.module, t, c.kind, op) of attachedDeepCopy: let op = getAttachedOp(c.g, t, attachedDeepCopy) if op != nil: #markUsed(c.g.config, c.info, op, c.g.usageSym) onUse(c.info, op) body.add newDeepCopyCall(c, op, x, y) result = true proc declareCounter(c: var TLiftCtx; body: PNode; first: BiggestInt): PNode = var temp = newSym(skTemp, getIdent(c.g.cache, lowerings.genPrefix), nextSymId(c.idgen), c.fn, c.info) temp.typ = getSysType(c.g, body.info, tyInt) incl(temp.flags, sfFromGeneric) var v = newNodeI(nkVarSection, c.info) result = newSymNode(temp) v.addVar(result, lowerings.newIntLit(c.g, body.info, first)) body.add v proc declareTempOf(c: var TLiftCtx; body: PNode; value: PNode): PNode = var temp = newSym(skTemp, getIdent(c.g.cache, lowerings.genPrefix), nextSymId(c.idgen), c.fn, c.info) temp.typ = value.typ incl(temp.flags, sfFromGeneric) var v = newNodeI(nkVarSection, c.info) result = newSymNode(temp) v.addVar(result, value) body.add v proc addIncStmt(c: var TLiftCtx; body, i: PNode) = let incCall = genBuiltin(c, mInc, "inc", i) incCall.add lowerings.newIntLit(c.g, c.info, 1) body.add incCall proc newSeqCall(c: var TLiftCtx; x, y: PNode): PNode = # don't call genAddr(c, x) here: result = genBuiltin(c, mNewSeq, "newSeq", x) let lenCall = genBuiltin(c, mLengthSeq, "len", y) lenCall.typ = getSysType(c.g, x.info, tyInt) result.add lenCall proc setLenStrCall(c: var TLiftCtx; x, y: PNode): PNode = let lenCall = genBuiltin(c, mLengthStr, "len", y) lenCall.typ = getSysType(c.g, x.info, tyInt) result = genBuiltin(c, mSetLengthStr, "setLen", x) # genAddr(g, x)) result.add lenCall proc setLenSeqCall(c: var TLiftCtx; t: PType; x, y: PNode): PNode = let lenCall = genBuiltin(c, mLengthSeq, "len", y) lenCall.typ = getSysType(c.g, x.info, tyInt) var op = getSysMagic(c.g, x.info, "setLen", mSetLengthSeq) op = instantiateGeneric(c, op, t, t) result = newTree(nkCall, newSymNode(op, x.info), x, lenCall) 
proc forallElements(c: var TLiftCtx; t: PType; body, x, y: PNode) = let counterIdx = body.len let i = declareCounter(c, body, toInt64(firstOrd(c.g.config, t))) let whileLoop = genWhileLoop(c, i, x) let elemType = t.lastSon let b = if c.kind == attachedTrace: y else: y.at(i, elemType) fillBody(c, elemType, whileLoop[1], x.at(i, elemType), b) if whileLoop[1].len > 0: addIncStmt(c, whileLoop[1], i) body.add whileLoop else: body.sons.setLen counterIdx proc fillSeqOp(c: var TLiftCtx; t: PType; body, x, y: PNode) = case c.kind of attachedAsgn, attachedDeepCopy: # we generate: # setLen(dest, y.len) # var i = 0 # while i < y.len: dest[i] = y[i]; inc(i) # This is usually more efficient than a destroy/create pair. body.add setLenSeqCall(c, t, x, y) forallElements(c, t, body, x, y) of attachedSink: let moveCall = genBuiltin(c, mMove, "move", x) moveCall.add y doAssert t.destructor != nil moveCall.add destructorCall(c, t.destructor, x) body.add moveCall of attachedDestructor: # destroy all elements: forallElements(c, t, body, x, y) body.add genBuiltin(c, mDestroy, "destroy", x) of attachedTrace: if canFormAcycle(t.elemType): # follow all elements: forallElements(c, t, body, x, y) of attachedDispose: forallElements(c, t, body, x, y) body.add genBuiltin(c, mDestroy, "destroy", x) proc useSeqOrStrOp(c: var TLiftCtx; t: PType; body, x, y: PNode) = createTypeBoundOps(c.g, c.c, t, body.info, c.idgen) # recursions are tricky, so we might need to forward the generated # operation here: var t = t if t.assignment == nil or t.destructor == nil: let h = sighashes.hashType(t, {CoType, CoConsiderOwned, CoDistinct}) let canon = c.g.canonTypes.getOrDefault(h) if canon != nil: t = canon case c.kind of attachedAsgn, attachedDeepCopy: # XXX: replace these with assertions. 
if t.assignment == nil: return # protect from recursion body.add newHookCall(c, t.assignment, x, y) of attachedSink: # we always inline the move for better performance: let moveCall = genBuiltin(c, mMove, "move", x) moveCall.add y doAssert t.destructor != nil moveCall.add destructorCall(c, t.destructor, x) body.add moveCall # alternatively we could do this: when false: doAssert t.asink != nil body.add newHookCall(c, t.asink, x, y) of attachedDestructor: doAssert t.destructor != nil body.add destructorCall(c, t.destructor, x) of attachedTrace: if t.kind != tyString and canFormAcycle(t.elemType): let op = getAttachedOp(c.g, t, c.kind) if op == nil: return # protect from recursion body.add newHookCall(c, op, x, y) of attachedDispose: let op = getAttachedOp(c.g, t, c.kind) if op == nil: return # protect from recursion body.add newHookCall(c, op, x, nil) proc fillStrOp(c: var TLiftCtx; t: PType; body, x, y: PNode) = case c.kind of attachedAsgn, attachedDeepCopy: body.add callCodegenProc(c.g, "nimAsgnStrV2", c.info, genAddr(c, x), y) of attachedSink: let moveCall = genBuiltin(c, mMove, "move", x) moveCall.add y doAssert t.destructor != nil moveCall.add destructorCall(c, t.destructor, x) body.add moveCall of attachedDestructor, attachedDispose: body.add genBuiltin(c, mDestroy, "destroy", x) of attachedTrace: discard "strings are atomic and have no inner elements that are to trace" proc cyclicType*(t: PType): bool = case t.kind of tyRef: result = types.canFormAcycle(t.lastSon) of tyProc: result = t.callConv == ccClosure else: result = false proc atomicRefOp(c: var TLiftCtx; t: PType; body, x, y: PNode) = #[ bug #15753 is really subtle. Usually the classical write barrier for reference counting looks like this:: incRef source # increment first; this takes care of self-assignments1 decRef dest dest[] = source However, 'decRef dest' might trigger a cycle collection and then the collector traverses the graph. 
It is crucial that when it follows the pointers the assignment 'dest[] = source' already happened so that we don't do trial deletion on a wrong graph -- this causes premature freeing of objects! The correct barrier looks like this:: let tmp = dest incRef source dest[] = source decRef tmp ]# var actions = newNodeI(nkStmtList, c.info) let elemType = t.lastSon createTypeBoundOps(c.g, c.c, elemType, c.info, c.idgen) let isCyclic = c.g.config.selectedGC == gcOrc and types.canFormAcycle(elemType) let tmp = if isCyclic and c.kind in {attachedAsgn, attachedSink}: declareTempOf(c, body, x) else: x if isFinal(elemType): addDestructorCall(c, elemType, actions, genDeref(tmp, nkDerefExpr)) var alignOf = genBuiltin(c, mAlignOf, "alignof", newNodeIT(nkType, c.info, elemType)) alignOf.typ = getSysType(c.g, c.info, tyInt) actions.add callCodegenProc(c.g, "nimRawDispose", c.info, tmp, alignOf) else: addDestructorCall(c, elemType, newNodeI(nkStmtList, c.info), genDeref(tmp, nkDerefExpr)) actions.add callCodegenProc(c.g, "nimDestroyAndDispose", c.info, tmp) var cond: PNode if isCyclic: if isFinal(elemType): let typInfo = genBuiltin(c, mGetTypeInfoV2, "getTypeInfoV2", newNodeIT(nkType, x.info, elemType)) typInfo.typ = getSysType(c.g, c.info, tyPointer) cond = callCodegenProc(c.g, "nimDecRefIsLastCyclicStatic", c.info, tmp, typInfo) else: cond = callCodegenProc(c.g, "nimDecRefIsLastCyclicDyn", c.info, tmp) else: cond = callCodegenProc(c.g, "nimDecRefIsLast", c.info, x) cond.typ = getSysType(c.g, x.info, tyBool) case c.kind of attachedSink: if isCyclic: body.add newAsgnStmt(x, y) body.add genIf(c, cond, actions) else: body.add genIf(c, cond, actions) body.add newAsgnStmt(x, y) of attachedAsgn: if isCyclic: body.add genIf(c, y, callCodegenProc(c.g, "nimIncRefCyclic", c.info, y, getCycleParam(c))) body.add newAsgnStmt(x, y) body.add genIf(c, cond, actions) else: body.add genIf(c, y, callCodegenProc(c.g, "nimIncRef", c.info, y)) body.add genIf(c, cond, actions) body.add newAsgnStmt(x, y) of 
attachedDestructor: body.add genIf(c, cond, actions) of attachedDeepCopy: assert(false, "cannot happen") of attachedTrace: if isCyclic: if isFinal(elemType): let typInfo = genBuiltin(c, mGetTypeInfoV2, "getTypeInfoV2", newNodeIT(nkType, x.info, elemType)) typInfo.typ = getSysType(c.g, c.info, tyPointer) body.add callCodegenProc(c.g, "nimTraceRef", c.info, genAddrOf(x, c.idgen), typInfo, y) else: # If the ref is polymorphic we have to account for this body.add callCodegenProc(c.g, "nimTraceRefDyn", c.info, genAddrOf(x, c.idgen), y) #echo "can follow ", elemType, " static ", isFinal(elemType) of attachedDispose: # this is crucial! dispose is like =destroy but we don't follow refs # as that is dealt within the cycle collector. if not isCyclic: body.add genIf(c, cond, actions) when false: let cond = copyTree(x) cond.typ = getSysType(c.g, x.info, tyBool) actions.add callCodegenProc(c.g, "nimRawDispose", c.info, x) body.add genIf(c, cond, actions) proc atomicClosureOp(c: var TLiftCtx; t: PType; body, x, y: PNode) = ## Closures are really like refs except they always use a virtual destructor ## and we need to do the refcounting only on the ref field which we call 'xenv': let xenv = genBuiltin(c, mAccessEnv, "accessEnv", x) xenv.typ = getSysType(c.g, c.info, tyPointer) let isCyclic = c.g.config.selectedGC == gcOrc let tmp = if isCyclic and c.kind in {attachedAsgn, attachedSink}: declareTempOf(c, body, xenv) else: xenv var actions = newNodeI(nkStmtList, c.info) actions.add callCodegenProc(c.g, "nimDestroyAndDispose", c.info, tmp) let decRefProc = if isCyclic: "nimDecRefIsLastCyclicDyn" else: "nimDecRefIsLast" let cond = callCodegenProc(c.g, decRefProc, c.info, tmp) cond.typ = getSysType(c.g, x.info, tyBool) case c.kind of attachedSink: if isCyclic: body.add newAsgnStmt(x, y) body.add genIf(c, cond, actions) else: body.add genIf(c, cond, actions) body.add newAsgnStmt(x, y) of attachedAsgn: let yenv = genBuiltin(c, mAccessEnv, "accessEnv", y) yenv.typ = getSysType(c.g, 
c.info, tyPointer) if isCyclic: body.add genIf(c, yenv, callCodegenProc(c.g, "nimIncRefCyclic", c.info, yenv, getCycleParam(c))) body.add newAsgnStmt(x, y) body.add genIf(c, cond, actions) else: body.add genIf(c, yenv, callCodegenProc(c.g, "nimIncRef", c.info, yenv)) body.add genIf(c, cond, actions) body.add newAsgnStmt(x, y) of attachedDestructor: body.add genIf(c, cond, actions) of attachedDeepCopy: assert(false, "cannot happen") of attachedTrace: body.add callCodegenProc(c.g, "nimTraceRefDyn", c.info, genAddrOf(xenv, c.idgen), y) of attachedDispose: # this is crucial! dispose is like =destroy but we don't follow refs # as that is dealt within the cycle collector. when false: let cond = copyTree(xenv) cond.typ = getSysType(c.g, xenv.info, tyBool) actions.add callCodegenProc(c.g, "nimRawDispose", c.info, xenv) body.add genIf(c, cond, actions) proc weakrefOp(c: var TLiftCtx; t: PType; body, x, y: PNode) = case c.kind of attachedSink: # we 'nil' y out afterwards so we *need* to take over its reference # count value: body.add genIf(c, x, callCodegenProc(c.g, "nimDecWeakRef", c.info, x)) body.add newAsgnStmt(x, y) of attachedAsgn: body.add genIf(c, y, callCodegenProc(c.g, "nimIncRef", c.info, y)) body.add genIf(c, x, callCodegenProc(c.g, "nimDecWeakRef", c.info, x)) body.add newAsgnStmt(x, y) of attachedDestructor: # it's better to prepend the destruction of weak refs in order to # prevent wrong "dangling refs exist" problems: var actions = newNodeI(nkStmtList, c.info) actions.add callCodegenProc(c.g, "nimDecWeakRef", c.info, x) let des = genIf(c, x, actions) if body.len == 0: body.add des else: body.sons.insert(des, 0) of attachedDeepCopy: assert(false, "cannot happen") of attachedTrace, attachedDispose: discard proc ownedRefOp(c: var TLiftCtx; t: PType; body, x, y: PNode) = var actions = newNodeI(nkStmtList, c.info) let elemType = t.lastSon #fillBody(c, elemType, actions, genDeref(x), genDeref(y)) #var disposeCall = genBuiltin(c, mDispose, "dispose", x) if 
isFinal(elemType): addDestructorCall(c, elemType, actions, genDeref(x, nkDerefExpr)) var alignOf = genBuiltin(c, mAlignOf, "alignof", newNodeIT(nkType, c.info, elemType)) alignOf.typ = getSysType(c.g, c.info, tyInt) actions.add callCodegenProc(c.g, "nimRawDispose", c.info, x, alignOf) else: addDestructorCall(c, elemType, newNodeI(nkStmtList, c.info), genDeref(x, nkDerefExpr)) actions.add callCodegenProc(c.g, "nimDestroyAndDispose", c.info, x) case c.kind of attachedSink, attachedAsgn: body.add genIf(c, x, actions) body.add newAsgnStmt(x, y) of attachedDestructor: body.add genIf(c, x, actions) of attachedDeepCopy: assert(false, "cannot happen") of attachedTrace, attachedDispose: discard proc closureOp(c: var TLiftCtx; t: PType; body, x, y: PNode) = if c.kind == attachedDeepCopy: # a big problem is that we don't know the environment's type here, so we # have to go through some indirection; we delegate this to the codegen: let call = newNodeI(nkCall, c.info, 2) call.typ = t call[0] = newSymNode(createMagic(c.g, c.idgen, "deepCopy", mDeepCopy)) call[1] = y body.add newAsgnStmt(x, call) elif (optOwnedRefs in c.g.config.globalOptions and optRefCheck in c.g.config.options) or c.g.config.selectedGC in {gcArc, gcOrc}: let xx = genBuiltin(c, mAccessEnv, "accessEnv", x) xx.typ = getSysType(c.g, c.info, tyPointer) case c.kind of attachedSink: # we 'nil' y out afterwards so we *need* to take over its reference # count value: body.add genIf(c, xx, callCodegenProc(c.g, "nimDecWeakRef", c.info, xx)) body.add newAsgnStmt(x, y) of attachedAsgn: let yy = genBuiltin(c, mAccessEnv, "accessEnv", y) yy.typ = getSysType(c.g, c.info, tyPointer) body.add genIf(c, yy, callCodegenProc(c.g, "nimIncRef", c.info, yy)) body.add genIf(c, xx, callCodegenProc(c.g, "nimDecWeakRef", c.info, xx)) body.add newAsgnStmt(x, y) of attachedDestructor: let des = genIf(c, xx, callCodegenProc(c.g, "nimDecWeakRef", c.info, xx)) if body.len == 0: body.add des else: body.sons.insert(des, 0) of attachedDeepCopy: 
assert(false, "cannot happen") of attachedTrace, attachedDispose: discard proc ownedClosureOp(c: var TLiftCtx; t: PType; body, x, y: PNode) = let xx = genBuiltin(c, mAccessEnv, "accessEnv", x) xx.typ = getSysType(c.g, c.info, tyPointer) var actions = newNodeI(nkStmtList, c.info) #discard addDestructorCall(c, elemType, newNodeI(nkStmtList, c.info), genDeref(xx)) actions.add callCodegenProc(c.g, "nimDestroyAndDispose", c.info, xx) case c.kind of attachedSink, attachedAsgn: body.add genIf(c, xx, actions) body.add newAsgnStmt(x, y) of attachedDestructor: body.add genIf(c, xx, actions) of attachedDeepCopy: assert(false, "cannot happen") of attachedTrace, attachedDispose: discard proc fillBody(c: var TLiftCtx; t: PType; body, x, y: PNode) = case t.kind of tyNone, tyEmpty, tyVoid: discard of tyPointer, tySet, tyBool, tyChar, tyEnum, tyInt..tyUInt64, tyCstring, tyPtr, tyUncheckedArray, tyVar, tyLent: defaultOp(c, t, body, x, y) of tyRef: if c.g.config.selectedGC in {gcArc, gcOrc}: atomicRefOp(c, t, body, x, y) elif (optOwnedRefs in c.g.config.globalOptions and optRefCheck in c.g.config.options): weakrefOp(c, t, body, x, y) else: defaultOp(c, t, body, x, y) of tyProc: if t.callConv == ccClosure: if c.g.config.selectedGC in {gcArc, gcOrc}: atomicClosureOp(c, t, body, x, y) else: closureOp(c, t, body, x, y) else: defaultOp(c, t, body, x, y) of tyOwned: let base = t.skipTypes(abstractInstOwned) if optOwnedRefs in c.g.config.globalOptions: case base.kind of tyRef: ownedRefOp(c, base, body, x, y) return of tyProc: if base.callConv == ccClosure: ownedClosureOp(c, base, body, x, y) return else: discard defaultOp(c, base, body, x, y) of tyArray: if tfHasAsgn in t.flags or useNoGc(c, t): forallElements(c, t, body, x, y) else: defaultOp(c, t, body, x, y) of tySequence: if useNoGc(c, t): useSeqOrStrOp(c, t, body, x, y) elif optSeqDestructors in c.g.config.globalOptions: # note that tfHasAsgn is propagated so we need the check on # 'selectedGC' here to determine if we have the new 
runtime. discard considerUserDefinedOp(c, t, body, x, y) elif tfHasAsgn in t.flags: if c.kind in {attachedAsgn, attachedSink, attachedDeepCopy}: body.add newSeqCall(c, x, y) forallElements(c, t, body, x, y) else: defaultOp(c, t, body, x, y) of tyString: if useNoGc(c, t): useSeqOrStrOp(c, t, body, x, y) elif tfHasAsgn in t.flags: discard considerUserDefinedOp(c, t, body, x, y) else: defaultOp(c, t, body, x, y) of tyObject: if not considerUserDefinedOp(c, t, body, x, y): if c.kind in {attachedAsgn, attachedSink} and t.sym != nil and sfImportc in t.sym.flags: body.add newAsgnStmt(x, y) else: fillBodyObjT(c, t, body, x, y) of tyDistinct: if not considerUserDefinedOp(c, t, body, x, y): fillBody(c, t[0], body, x, y) of tyTuple: fillBodyTup(c, t, body, x, y) of tyVarargs, tyOpenArray: if c.kind == attachedDestructor and (tfHasAsgn in t.flags or useNoGc(c, t)): forallElements(c, t, body, x, y) else: discard "cannot copy openArray" of tyFromExpr, tyProxy, tyBuiltInTypeClass, tyUserTypeClass, tyUserTypeClassInst, tyCompositeTypeClass, tyAnd, tyOr, tyNot, tyAnything, tyGenericParam, tyGenericBody, tyNil, tyUntyped, tyTyped, tyTypeDesc, tyGenericInvocation, tyForward, tyStatic: #internalError(c.g.config, c.info, "assignment requested for type: " & typeToString(t)) discard of tyOrdinal, tyRange, tyInferred, tyGenericInst, tyAlias, tySink: fillBody(c, lastSon(t), body, x, y) of tyConcept, tyIterable: doAssert false proc produceSymDistinctType(g: ModuleGraph; c: PContext; typ: PType; kind: TTypeAttachedOp; info: TLineInfo; idgen: IdGenerator): PSym = assert typ.kind == tyDistinct let baseType = typ[0] if getAttachedOp(g, baseType, kind) == nil: discard produceSym(g, c, baseType, kind, info, idgen) result = getAttachedOp(g, baseType, kind) setAttachedOp(g, idgen.module, typ, kind, result) proc symPrototype(g: ModuleGraph; typ: PType; owner: PSym; kind: TTypeAttachedOp; info: TLineInfo; idgen: IdGenerator): PSym = let procname = getIdent(g.cache, AttachedOpToStr[kind]) result = 
newSym(skProc, procname, nextSymId(idgen), owner, info) let dest = newSym(skParam, getIdent(g.cache, "dest"), nextSymId(idgen), result, info) let src = newSym(skParam, getIdent(g.cache, if kind == attachedTrace: "env" else: "src"), nextSymId(idgen), result, info) dest.typ = makeVarType(typ.owner, typ, idgen) if kind == attachedTrace: src.typ = getSysType(g, info, tyPointer) else: src.typ = typ result.typ = newProcType(info, nextTypeId(idgen), owner) result.typ.addParam dest if kind notin {attachedDestructor, attachedDispose}: result.typ.addParam src if kind == attachedAsgn and g.config.selectedGC == gcOrc and cyclicType(typ.skipTypes(abstractInst)): let cycleParam = newSym(skParam, getIdent(g.cache, "cyclic"), nextSymId(idgen), result, info) cycleParam.typ = getSysType(g, info, tyBool) result.typ.addParam cycleParam var n = newNodeI(nkProcDef, info, bodyPos+1) for i in 0..<n.len: n[i] = newNodeI(nkEmpty, info) n[namePos] = newSymNode(result) n[paramsPos] = result.typ.n n[bodyPos] = newNodeI(nkStmtList, info) result.ast = n incl result.flags, sfFromGeneric incl result.flags, sfGeneratedOp proc produceSym(g: ModuleGraph; c: PContext; typ: PType; kind: TTypeAttachedOp; info: TLineInfo; idgen: IdGenerator): PSym = if typ.kind == tyDistinct: return produceSymDistinctType(g, c, typ, kind, info, idgen) result = getAttachedOp(g, typ, kind) if result == nil: result = symPrototype(g, typ, typ.owner, kind, info, idgen) var a = TLiftCtx(info: info, g: g, kind: kind, c: c, asgnForType: typ, idgen: idgen, fn: result) let dest = result.typ.n[1].sym let d = newDeref(newSymNode(dest)) let src = if kind in {attachedDestructor, attachedDispose}: newNodeIT(nkSym, info, getSysType(g, info, tyPointer)) else: newSymNode(result.typ.n[2].sym) # register this operation already: setAttachedOpPartial(g, idgen.module, typ, kind, result) if kind == attachedSink and destructorOverriden(g, typ): ## compiler can use a combination of `=destroy` and memCopy for sink op dest.flags.incl sfCursor 
result.ast[bodyPos].add newOpCall(a, getAttachedOp(g, typ, attachedDestructor), d[0]) result.ast[bodyPos].add newAsgnStmt(d, src) else: var tk: TTypeKind if g.config.selectedGC in {gcArc, gcOrc, gcHooks}: tk = skipTypes(typ, {tyOrdinal, tyRange, tyInferred, tyGenericInst, tyStatic, tyAlias, tySink}).kind else: tk = tyNone # no special casing for strings and seqs case tk of tySequence: fillSeqOp(a, typ, result.ast[bodyPos], d, src) of tyString: fillStrOp(a, typ, result.ast[bodyPos], d, src) else: fillBody(a, typ, result.ast[bodyPos], d, src) if not a.canRaise: incl result.flags, sfNeverRaises completePartialOp(g, idgen.module, typ, kind, result) proc produceDestructorForDiscriminator*(g: ModuleGraph; typ: PType; field: PSym, info: TLineInfo; idgen: IdGenerator): PSym = assert(typ.skipTypes({tyAlias, tyGenericInst}).kind == tyObject) result = symPrototype(g, field.typ, typ.owner, attachedDestructor, info, idgen) var a = TLiftCtx(info: info, g: g, kind: attachedDestructor, asgnForType: typ, idgen: idgen, fn: result) a.asgnForType = typ a.filterDiscriminator = field a.addMemReset = true let discrimantDest = result.typ.n[1].sym let dst = newSym(skVar, getIdent(g.cache, "dest"), nextSymId(idgen), result, info) dst.typ = makePtrType(typ.owner, typ, idgen) let dstSym = newSymNode(dst) let d = newDeref(dstSym) let v = newNodeI(nkVarSection, info) v.addVar(dstSym, genContainerOf(a, typ, field, discrimantDest)) result.ast[bodyPos].add v let placeHolder = newNodeIT(nkSym, info, getSysType(g, info, tyPointer)) fillBody(a, typ, result.ast[bodyPos], d, placeHolder) if not a.canRaise: incl result.flags, sfNeverRaises template liftTypeBoundOps*(c: PContext; typ: PType; info: TLineInfo) = discard "now a nop" proc patchBody(g: ModuleGraph; c: PContext; n: PNode; info: TLineInfo; idgen: IdGenerator) = if n.kind in nkCallKinds: if n[0].kind == nkSym and n[0].sym.magic == mDestroy: let t = n[1].typ.skipTypes(abstractVar) if getAttachedOp(g, t, attachedDestructor) == nil: discard 
produceSym(g, c, t, attachedDestructor, info, idgen) let op = getAttachedOp(g, t, attachedDestructor) if op != nil: if op.ast.isGenericRoutine: internalError(g.config, info, "resolved destructor is generic") if op.magic == mDestroy: internalError(g.config, info, "patching mDestroy with mDestroy?") n[0] = newSymNode(op) for x in n: patchBody(g, c, x, info, idgen) proc inst(g: ModuleGraph; c: PContext; t: PType; kind: TTypeAttachedOp; idgen: IdGenerator; info: TLineInfo) = let op = getAttachedOp(g, t, kind) if op != nil and op.ast != nil and op.ast.isGenericRoutine: if t.typeInst != nil: var a: TLiftCtx a.info = info a.g = g a.kind = kind a.c = c a.idgen = idgen let opInst = instantiateGeneric(a, op, t, t.typeInst) if opInst.ast != nil: patchBody(g, c, opInst.ast, info, a.idgen) setAttachedOp(g, idgen.module, t, kind, opInst) else: localError(g.config, info, "unresolved generic parameter") proc isTrival(s: PSym): bool {.inline.} = s == nil or (s.ast != nil and s.ast[bodyPos].len == 0) proc createTypeBoundOps(g: ModuleGraph; c: PContext; orig: PType; info: TLineInfo; idgen: IdGenerator) = ## In the semantic pass this is called in strategic places ## to ensure we lift assignment, destructors and moves properly. ## The later 'injectdestructors' pass depends on it. if orig == nil or {tfCheckedForDestructor, tfHasMeta} * orig.flags != {}: return incl orig.flags, tfCheckedForDestructor let skipped = orig.skipTypes({tyGenericInst, tyAlias, tySink}) if isEmptyContainer(skipped) or skipped.kind == tyStatic: return let h = sighashes.hashType(skipped, {CoType, CoConsiderOwned, CoDistinct}) var canon = g.canonTypes.getOrDefault(h) if canon == nil: g.canonTypes[h] = skipped canon = skipped # multiple cases are to distinguish here: # 1. we don't know yet if 'typ' has a nontrival destructor. # 2. we have a nop destructor. --> mDestroy # 3. we have a lifted destructor. # 4. We have a custom destructor. # 5. We have a (custom) generic destructor. 
# we do not generate '=trace' nor '=dispose' procs if we # have the cycle detection disabled, saves code size. let lastAttached = if g.config.selectedGC == gcOrc: attachedDispose else: attachedSink # bug #15122: We need to produce all prototypes before entering the # mind boggling recursion. Hacks like these imply we should rewrite # this module. var generics: array[attachedDestructor..attachedDispose, bool] for k in attachedDestructor..lastAttached: generics[k] = getAttachedOp(g, canon, k) != nil if not generics[k]: setAttachedOp(g, idgen.module, canon, k, symPrototype(g, canon, canon.owner, k, info, idgen)) # we generate the destructor first so that other operators can depend on it: for k in attachedDestructor..lastAttached: if not generics[k]: discard produceSym(g, c, canon, k, info, idgen) else: inst(g, c, canon, k, idgen, info) if canon != orig: setAttachedOp(g, idgen.module, orig, k, getAttachedOp(g, canon, k)) if not isTrival(getAttachedOp(g, orig, attachedDestructor)): #or not isTrival(orig.assignment) or # not isTrival(orig.sink): orig.flags.incl tfHasAsgn # ^ XXX Breaks IC!