# # # The Nim Compiler # (c) Copyright 2017 Andreas Rumpf # # See the file "copying.txt", included in this # distribution, for details about the copyright. # ## Injects destructor calls into Nim code as well as ## an optimizer that optimizes copies to moves. This is implemented as an ## AST to AST transformation so that every backend benefits from it. ## See doc/destructors.rst for a spec of the implemented rewrite rules ## XXX Optimization to implement: if a local variable is only assigned ## string literals as in ``let x = if cond: "foo" else: "bar"`` do not ## produce a destructor call for ``x``. The address of ``x`` must also ## not have been taken. ``x = "abc"; x.add(...)`` # Todo: # - eliminate 'wasMoved(x); destroy(x)' pairs as a post processing step. import intsets, ast, astalgo, msgs, renderer, magicsys, types, idents, strutils, options, dfa, lowerings, tables, modulegraphs, msgs, lineinfos, parampatterns, sighashes, liftdestructors from trees import exprStructuralEquivalent from algorithm import reverse const scopeBasedDestruction = false type Con = object owner: PSym g: ControlFlowGraph jumpTargets: IntSet destroys, topLevelVars: PNode scopeDestroys: seq[PNode] # used as a stack that we pop from # at strategic places which try to # mimic the natural scope. 
graph: ModuleGraph emptyNode: PNode otherRead: PNode inLoop, inSpawn, hasUnstructuredCf, inDangerousBranch: int declaredVars: IntSet # variables we already moved to the top level uninit: IntSet # set of uninit'ed vars uninitComputed: bool ProcessMode = enum normal consumed sinkArg const toDebug {.strdefine.} = "" template dbg(body) = when toDebug.len > 0: if c.owner.name.s == toDebug or toDebug == "always": body proc p(n: PNode; c: var Con; mode: ProcessMode): PNode proc moveOrCopy(dest, ri: PNode; c: var Con): PNode proc isLastRead(location: PNode; c: var Con; pc, comesFrom: int): int = var pc = pc while pc < c.g.len: case c.g[pc].kind of def: if defInstrTargets(c.g[pc], location): # the path leads to a redefinition of 's' --> abandon it. return high(int) inc pc of use: if useInstrTargets(c.g[pc], location): c.otherRead = c.g[pc].n return -1 inc pc of goto: pc = pc + c.g[pc].dest of fork: # every branch must lead to the last read of the location: let variantA = isLastRead(location, c, pc+1, pc) if variantA < 0: return -1 var variantB = isLastRead(location, c, pc + c.g[pc].dest, pc) if variantB < 0: return -1 elif variantB == high(int): variantB = variantA pc = variantB of InstrKind.join: let dest = pc + c.g[pc].dest if dest == comesFrom: return pc + 1 inc pc return pc proc isLastRead(n: PNode; c: var Con): bool = # first we need to search for the instruction that belongs to 'n': c.otherRead = nil var instr = -1 let m = dfa.skipConvDfa(n) for i in 0..= c.g.len: return true result = isLastRead(n, c, instr+1, -1) >= 0 dbg: echo "ugh ", c.otherRead.isNil, " ", result proc isFirstWrite(location: PNode; c: var Con; pc, comesFrom: int; instr: int): int = var pc = pc while pc < instr: case c.g[pc].kind of def: if defInstrTargets(c.g[pc], location): # a definition of 's' before ours makes ours not the first write return -1 inc pc of use: if useInstrTargets(c.g[pc], location): return -1 inc pc of goto: pc = pc + c.g[pc].dest of fork: # every branch must not contain a 
def/use of our location: let variantA = isFirstWrite(location, c, pc+1, pc, instr) if variantA < 0: return -1 var variantB = isFirstWrite(location, c, pc + c.g[pc].dest, pc, instr + c.g[pc].dest) if variantB < 0: return -1 elif variantB == high(int): variantB = variantA pc = variantB of InstrKind.join: let dest = pc + c.g[pc].dest if dest == comesFrom: return pc + 1 inc pc return pc proc isFirstWrite(n: PNode; c: var Con): bool = # first we need to search for the instruction that belongs to 'n': var instr = -1 let m = dfa.skipConvDfa(n) for i in countdown(c.g.len-1, 0): # We search backwards here to treat loops correctly if c.g[i].kind == def and c.g[i].n == m: if instr < 0: instr = i break if instr < 0: return false # we go through all paths going to 'instr' and need to # ensure that we don't find another 'def/use X' instruction. if instr == 0: return true result = isFirstWrite(n, c, 0, -1, instr) >= 0 proc initialized(code: ControlFlowGraph; pc: int, init, uninit: var IntSet; comesFrom: int): int = ## Computes the set of definitely initialized variables across all code paths ## as an IntSet of IDs. 
var pc = pc while pc < code.len: case code[pc].kind of goto: pc = pc + code[pc].dest of fork: let target = pc + code[pc].dest var initA = initIntSet() var initB = initIntSet() let pcA = initialized(code, pc+1, initA, uninit, pc) discard initialized(code, target, initB, uninit, pc) # we add vars if they are in both branches: for v in initA: if v in initB: init.incl v pc = pcA+1 of InstrKind.join: let target = pc + code[pc].dest if comesFrom == target: return pc inc pc of use: let v = code[pc].n.sym if v.kind != skParam and v.id notin init: # attempt to read an uninit'ed variable uninit.incl v.id inc pc of def: let v = code[pc].n.sym init.incl v.id inc pc return pc template isUnpackedTuple(n: PNode): bool = ## we move out all elements of unpacked tuples, ## hence unpacked tuples themselves don't need to be destroyed (n.kind == nkSym and n.sym.kind == skTemp and n.sym.typ.kind == tyTuple) proc checkForErrorPragma(c: Con; t: PType; ri: PNode; opname: string) = var m = "'" & opname & "' is not available for type <" & typeToString(t) & ">" if opname == "=" and ri != nil: m.add "; requires a copy because it's not the last read of '" m.add renderTree(ri) m.add '\'' if c.otherRead != nil: m.add "; another read is done here: " m.add c.graph.config $ c.otherRead.info elif ri.kind == nkSym and ri.sym.kind == skParam and not isSinkType(ri.sym.typ): m.add "; try to make " m.add renderTree(ri) m.add " a 'sink' parameter" m.add "; routine: " m.add c.owner.name.s localError(c.graph.config, ri.info, errGenerated, m) proc makePtrType(c: Con, baseType: PType): PType = result = newType(tyPtr, c.owner) addSonSkipIntLit(result, baseType) proc genOp(c: Con; op: PSym; dest: PNode): PNode = let addrExp = newNodeIT(nkHiddenAddr, dest.info, makePtrType(c, dest.typ)) addrExp.add(dest) result = newTree(nkCall, newSymNode(op), addrExp) proc genOp(c: Con; t: PType; kind: TTypeAttachedOp; dest, ri: PNode): PNode = var op = t.attachedOps[kind] if op == nil or op.ast[genericParamsPos].kind != 
nkEmpty: # give up and find the canonical type instead: let h = sighashes.hashType(t, {CoType, CoConsiderOwned, CoDistinct}) let canon = c.graph.canonTypes.getOrDefault(h) if canon != nil: op = canon.attachedOps[kind] if op == nil: #echo dest.typ.id globalError(c.graph.config, dest.info, "internal error: '" & AttachedOpToStr[kind] & "' operator not found for type " & typeToString(t)) elif op.ast[genericParamsPos].kind != nkEmpty: globalError(c.graph.config, dest.info, "internal error: '" & AttachedOpToStr[kind] & "' operator is generic") dbg: if kind == attachedDestructor: echo "destructor is ", op.id, " ", op.ast if sfError in op.flags: checkForErrorPragma(c, t, ri, AttachedOpToStr[kind]) genOp(c, op, dest) proc genDestroy(c: Con; dest: PNode): PNode = let t = dest.typ.skipTypes({tyGenericInst, tyAlias, tySink}) result = genOp(c, t, attachedDestructor, dest, nil) when false: proc preventMoveRef(dest, ri: PNode): bool = let lhs = dest.typ.skipTypes({tyGenericInst, tyAlias, tySink}) var ri = ri if ri.kind in nkCallKinds and ri[0].kind == nkSym and ri[0].sym.magic == mUnown: ri = ri[1] let rhs = ri.typ.skipTypes({tyGenericInst, tyAlias, tySink}) result = lhs.kind == tyRef and rhs.kind == tyOwned proc canBeMoved(c: Con; t: PType): bool {.inline.} = let t = t.skipTypes({tyGenericInst, tyAlias, tySink}) if optOwnedRefs in c.graph.config.globalOptions: result = t.kind != tyRef and t.attachedOps[attachedSink] != nil else: result = t.attachedOps[attachedSink] != nil proc genSink(c: var Con; dest, ri: PNode): PNode = if isUnpackedTuple(dest) or isFirstWrite(dest, c): # optimize sink call into a bitwise memcopy result = newTree(nkFastAsgn, dest, ri) else: let t = dest.typ.skipTypes({tyGenericInst, tyAlias, tySink}) if t.attachedOps[attachedSink] != nil: result = genOp(c, t, attachedSink, dest, ri) result.add ri else: # the default is to use combination of `=destroy(dest)` and # and copyMem(dest, source). This is efficient. 
let snk = newTree(nkFastAsgn, dest, ri) result = newTree(nkStmtList, genDestroy(c, dest), snk) proc genCopyNoCheck(c: Con; dest, ri: PNode): PNode = let t = dest.typ.skipTypes({tyGenericInst, tyAlias, tySink}) result = genOp(c, t, attachedAsgn, dest, ri) proc genCopy(c: var Con; dest, ri: PNode): PNode = let t = dest.typ if tfHasOwned in t.flags and ri.kind != nkNilLit: # try to improve the error message here: if c.otherRead == nil: discard isLastRead(ri, c) checkForErrorPragma(c, t, ri, "=") result = genCopyNoCheck(c, dest, ri) proc addTopVar(c: var Con; v: PNode) = c.topLevelVars.add newTree(nkIdentDefs, v, c.emptyNode, c.emptyNode) proc getTemp(c: var Con; typ: PType; info: TLineInfo): PNode = let sym = newSym(skTemp, getIdent(c.graph.cache, ":tmpD"), c.owner, info) sym.typ = typ result = newSymNode(sym) proc genDiscriminantAsgn(c: var Con; n: PNode): PNode = # discriminator is ordinal value that doesn't need sink destroy # but fields within active case branch might need destruction # tmp to support self assignments let tmp = getTemp(c, n[1].typ, n.info) c.addTopVar(tmp) result = newTree(nkStmtList) result.add newTree(nkFastAsgn, tmp, p(n[1], c, consumed)) result.add p(n[0], c, normal) let le = p(n[0], c, normal) let leDotExpr = if le.kind == nkCheckedFieldExpr: le[0] else: le let objType = leDotExpr[0].typ if hasDestructor(objType): if objType.attachedOps[attachedDestructor] != nil and sfOverriden in objType.attachedOps[attachedDestructor].flags: localError(c.graph.config, n.info, errGenerated, """Assignment to discriminant for object's with user defined destructor is not supported, object must have default destructor. 
It is best to factor out piece of object that needs custom destructor into separate object or not use discriminator assignment""") result.add newTree(nkFastAsgn, le, tmp) return # generate: if le != tmp: `=destroy`(le) let branchDestructor = produceDestructorForDiscriminator(c.graph, objType, leDotExpr[1].sym, n.info) let cond = newNodeIT(nkInfix, n.info, getSysType(c.graph, unknownLineInfo, tyBool)) cond.add newSymNode(getMagicEqSymForType(c.graph, le.typ, n.info)) cond.add le cond.add tmp let notExpr = newNodeIT(nkPrefix, n.info, getSysType(c.graph, unknownLineInfo, tyBool)) notExpr.add newSymNode(createMagic(c.graph, "not", mNot)) notExpr.add cond result.add newTree(nkIfStmt, newTree(nkElifBranch, notExpr, genOp(c, branchDestructor, le))) result.add newTree(nkFastAsgn, le, tmp) else: result.add newTree(nkFastAsgn, le, tmp) proc genWasMoved(n: PNode; c: var Con): PNode = result = newNodeI(nkCall, n.info) result.add(newSymNode(createMagic(c.graph, "wasMoved", mWasMoved))) result.add copyTree(n) #mWasMoved does not take the address proc genDefaultCall(t: PType; c: Con; info: TLineInfo): PNode = result = newNodeI(nkCall, info) result.add(newSymNode(createMagic(c.graph, "default", mDefault))) result.typ = t proc destructiveMoveVar(n: PNode; c: var Con): PNode = # generate: (let tmp = v; reset(v); tmp) if not hasDestructor(n.typ): result = copyTree(n) else: result = newNodeIT(nkStmtListExpr, n.info, n.typ) var temp = newSym(skLet, getIdent(c.graph.cache, "blitTmp"), c.owner, n.info) temp.typ = n.typ var v = newNodeI(nkLetSection, n.info) let tempAsNode = newSymNode(temp) var vpart = newNodeI(nkIdentDefs, tempAsNode.info, 3) vpart[0] = tempAsNode vpart[1] = c.emptyNode vpart[2] = n v.add(vpart) result.add v result.add genWasMoved(skipConv(n), c) result.add tempAsNode proc sinkParamIsLastReadCheck(c: var Con, s: PNode) = assert s.kind == nkSym and s.sym.kind == skParam if not isLastRead(s, c): localError(c.graph.config, c.otherRead.info, "sink parameter `" & 
$s.sym.name.s & "` is already consumed at " & toFileLineCol(c. graph.config, s.info)) proc isClosureEnv(n: PNode): bool = n.kind == nkSym and n.sym.name.s[0] == ':' proc passCopyToSink(n: PNode; c: var Con): PNode = result = newNodeIT(nkStmtListExpr, n.info, n.typ) let tmp = getTemp(c, n.typ, n.info) when not scopeBasedDestruction: c.addTopVar(tmp) if hasDestructor(n.typ): result.add genWasMoved(tmp, c) var m = genCopy(c, tmp, n) m.add p(n, c, normal) result.add m if isLValue(n) and not isClosureEnv(n) and n.typ.skipTypes(abstractInst).kind != tyRef and c.inSpawn == 0: message(c.graph.config, n.info, hintPerformance, ("passing '$1' to a sink parameter introduces an implicit copy; " & "use 'move($1)' to prevent it") % $n) else: if c.graph.config.selectedGC in {gcArc, gcOrc}: assert(not containsGarbageCollectedRef(n.typ)) result.add newTree(nkAsgn, tmp, p(n, c, normal)) # Since we know somebody will take over the produced copy, there is # no need to destroy it. result.add tmp proc isDangerousSeq(t: PType): bool {.inline.} = let t = t.skipTypes(abstractInst) result = t.kind == tySequence and tfHasOwned notin t[0].flags proc containsConstSeq(n: PNode): bool = if n.kind == nkBracket and n.len > 0 and n.typ != nil and isDangerousSeq(n.typ): return true result = false case n.kind of nkExprEqExpr, nkExprColonExpr, nkHiddenStdConv, nkHiddenSubConv: result = containsConstSeq(n[1]) of nkObjConstr, nkClosure: for i in 1.. 
0 and body[^1].kind == nkSym: # special case: Do not translate (x; y; sym) into # (x; y; tmp = sym; destroy(x); destroy(y); tmp ) # but into # (x; y; destroy(x); destroy(y); sym ) let sym = body[^1] body[^1] = c.scopeDestroys[^1] for i in countdown(c.scopeDestroys.high - 1, oldTmpDestroysLen): body.add c.scopeDestroys[i] body.add sym else: # fun ahead: We have to transform (x; y; E()) into # (x; y; tmp = E(); destroy(x); destroy(y); tmp ) let t2 = body[^1].typ let tmp = getTemp(c, t2, body.info) when not scopeBasedDestruction: c.addTopVar(tmp) # the tmp does not have to be initialized var n = newNodeIT(nkStmtListExpr, body.info, t2) n.add newTree(nkFastAsgn, tmp, body[^1]) for i in countdown(c.scopeDestroys.high, oldTmpDestroysLen): n.add c.scopeDestroys[i] n.add tmp body[^1] = n #c.scopeDestroys.add genDestroy(c, tmp) else: # unstructured control flow was used, use a 'try finally' to ensure # destruction: if isEmptyType(t): var n = newNodeI(nkStmtList, body.info) for i in countdown(c.scopeDestroys.high, oldTmpDestroysLen): n.add c.scopeDestroys[i] body[^1] = newTryFinally(body[^1], n) else: # fun ahead: We have to transform (x; y; E()) into # ((try: tmp = (x; y; E()); finally: destroy(x); destroy(y)); tmp ) let t2 = body[^1].typ let tmp = getTemp(c, t2, body.info) when not scopeBasedDestruction: c.addTopVar(tmp) # the tmp does not have to be initialized var fin = newNodeI(nkStmtList, body.info) for i in countdown(c.scopeDestroys.high, oldTmpDestroysLen): fin.add c.scopeDestroys[i] var n = newNodeIT(nkStmtListExpr, body.info, t2) n.add newTryFinally(newTree(nkFastAsgn, tmp, body[^1]), fin) n.add tmp body[^1] = n #c.scopeDestroys.add genDestroy(c, tmp) c.scopeDestroys.setLen oldTmpDestroysLen proc handleNested(n, dest: PNode; c: var Con; mode: ProcessMode): PNode = template processCall(node: PNode): PNode = if node.typ == nil or dest == nil: p(node, c, mode) else: moveOrCopy(dest, node, c) proc handleScope(n, dest: PNode; t: PType; takeOver: Natural; c: var Con; 
mode: ProcessMode): PNode = let oldHasUnstructuredCf = c.hasUnstructuredCf let oldTmpDestroysLen = c.scopeDestroys.len result = shallowCopy(n) for i in 0.. oldTmpDestroysLen: handleTmpDestroys(c, result, t, oldHasUnstructuredCf, oldTmpDestroysLen) else: setLen(result.sons, last) if c.scopeDestroys.len > oldTmpDestroysLen: handleTmpDestroys(c, result, t, oldHasUnstructuredCf, oldTmpDestroysLen) if result.kind != nkFinally: result.add processCall(n[last]) else: result = newTree(nkStmtListExpr, result, processCall(n[last])) result.typ = t case n.kind of nkStmtList, nkStmtListExpr: if n.len == 0: return n result = shallowCopy(n) let last = n.len - 1 for i in 0.. 0: # unpacked tuple needs reset at every loop iteration res.add newTree(nkFastAsgn, v, genDefaultCall(v.typ, c, v.info)) elif sfThread notin v.sym.flags: # do not destroy thread vars for now at all for consistency. if sfGlobal in v.sym.flags: c.graph.globalDestructors.add genDestroy(c, v) else: c.destroys.add genDestroy(c, v) if ri.kind == nkEmpty and c.inLoop > 0: res.add moveOrCopy(v, genDefaultCall(v.typ, c, v.info), c) elif ri.kind != nkEmpty: res.add moveOrCopy(v, ri, c) proc pVarScoped(v: PNode; c: var Con; ri, res: PNode) = if not containsOrIncl(c.declaredVars, v.sym.id): c.addTopVar(v) if isUnpackedTuple(v): if c.inLoop > 0: # unpacked tuple needs reset at every loop iteration res.add newTree(nkFastAsgn, v, genDefaultCall(v.typ, c, v.info)) elif {sfGlobal, sfThread} * v.sym.flags == {sfGlobal}: c.graph.globalDestructors.add genDestroy(c, v) else: # We always translate 'var v = f()' into bitcopies. If 'v' is in a loop, # the destruction at the loop end will free the resources. Other assignments # will destroy the old value inside 'v'. If we have 'var v' without an initial # default value we translate it into 'var v = default()'. We translate # 'var x = someGlobal' into 'var v = default(); `=`(v, someGlobal). The # lack of copy constructors is really beginning to hurt us. 
:-( #if c.inDangerousBranch == 0: v.sym.flags.incl sfNoInit c.scopeDestroys.add genDestroy(c, v) if ri.kind == nkEmpty and c.inLoop > 0: res.add moveOrCopy(v, genDefaultCall(v.typ, c, v.info), c) elif ri.kind != nkEmpty: res.add moveOrCopy(v, ri, c) template handleNestedTempl(n: untyped, processCall: untyped) = case n.kind of nkStmtList, nkStmtListExpr: if n.len == 0: return n result = copyNode(n) for i in 0.. 0: c.inSpawn.dec let parameters = n[0].typ let L = if parameters != nil: parameters.len else: 0 var isDangerous = false if n[0].kind == nkSym and n[0].sym.magic in {mOr, mAnd}: inc c.inDangerousBranch isDangerous = true result = shallowCopy(n) for i in 1.. 0): result[i] = p(n[i], c, sinkArg) else: result[i] = p(n[i], c, normal) if isDangerous: dec c.inDangerousBranch if n[0].kind == nkSym and n[0].sym.magic in {mNew, mNewFinalize}: result[0] = copyTree(n[0]) if c.graph.config.selectedGC in {gcHooks, gcArc, gcOrc}: let destroyOld = genDestroy(c, result[1]) result = newTree(nkStmtList, destroyOld, result) else: result[0] = p(n[0], c, normal) when scopeBasedDestruction: if canRaise(n[0]): inc c.hasUnstructuredCf if mode == normal: result = ensureDestruction(result, c) of nkDiscardStmt: # Small optimization result = shallowCopy(n) if n[0].kind != nkEmpty: result[0] = p(n[0], c, normal) else: result[0] = copyNode(n[0]) of nkVarSection, nkLetSection: # transform; var x = y to var x; x op y where op is a move or copy result = newNodeI(nkStmtList, n.info) for it in n: var ri = it[^1] if it.kind == nkVarTuple and hasDestructor(ri.typ): let x = lowerTupleUnpacking(c.graph, it, c.owner) result.add p(x, c, consumed) elif it.kind == nkIdentDefs and hasDestructor(it[0].typ) and not isCursor(it[0]): for j in 0.. 0: ri = genDefaultCall(v.typ, c, v.info) if ri.kind != nkEmpty: result.add moveOrCopy(v, ri, c) else: # keep the var but transform 'ri': var v = copyNode(n) var itCopy = copyNode(it) for j in 0.. 
0 and isDangerousSeq(ri.typ): result = genCopy(c, dest, ri) result.add p(ri, c, consumed) else: result = genSink(c, dest, p(ri, c, consumed)) of nkObjConstr, nkTupleConstr, nkClosure, nkCharLit..nkNilLit: result = genSink(c, dest, p(ri, c, consumed)) of nkSym: if isSinkParam(ri.sym) and isLastRead(ri, c): # Rule 3: `=sink`(x, z); wasMoved(z) #sinkParamIsLastReadCheck(c, ri) let snk = genSink(c, dest, ri) result = newTree(nkStmtList, snk, genWasMoved(ri, c)) elif ri.sym.kind != skParam and ri.sym.owner == c.owner and isLastRead(ri, c) and canBeMoved(c, dest.typ): # Rule 3: `=sink`(x, z); wasMoved(z) let snk = genSink(c, dest, ri) result = newTree(nkStmtList, snk, genWasMoved(ri, c)) else: result = genCopy(c, dest, ri) result.add p(ri, c, consumed) of nkHiddenSubConv, nkHiddenStdConv, nkConv: when false: result = moveOrCopy(dest, ri[1], c) if not sameType(ri.typ, ri[1].typ): let copyRi = copyTree(ri) copyRi[1] = result[^1] result[^1] = copyRi else: result = genSink(c, dest, p(ri, c, sinkArg)) of nkObjDownConv, nkObjUpConv: when false: result = moveOrCopy(dest, ri[0], c) let copyRi = copyTree(ri) copyRi[0] = result[^1] result[^1] = copyRi else: result = genSink(c, dest, p(ri, c, sinkArg)) of nkStmtListExpr, nkBlockExpr, nkIfExpr, nkCaseStmt: when scopeBasedDestruction: result = handleNested(ri, dest, c, normal) else: handleNestedTempl(ri): moveOrCopy(dest, node, c) else: if isAnalysableFieldAccess(ri, c.owner) and isLastRead(ri, c) and canBeMoved(c, dest.typ): # Rule 3: `=sink`(x, z); wasMoved(z) let snk = genSink(c, dest, ri) result = newTree(nkStmtList, snk, genWasMoved(ri, c)) else: result = genCopy(c, dest, ri) result.add p(ri, c, consumed) proc computeUninit(c: var Con) = if not c.uninitComputed: c.uninitComputed = true c.uninit = initIntSet() var init = initIntSet() discard initialized(c.g, pc = 0, init, c.uninit, comesFrom = -1) proc injectDefaultCalls(n: PNode, c: var Con) = case n.kind of nkVarSection, nkLetSection: for it in n: if it.kind == nkIdentDefs and 
it[^1].kind == nkEmpty: computeUninit(c) for j in 0.. 0: result.add c.topLevelVars if c.destroys.len > 0 or c.scopeDestroys.len > 0: reverse c.destroys.sons var fin: PNode if owner.kind == skModule: fin = newTryFinally(body, extractDestroysForTemporaries(c, c.destroys)) g.globalDestructors.add c.destroys else: fin = newTryFinally(body, c.destroys) for i in countdown(c.scopeDestroys.high, 0): fin[1][0].add c.scopeDestroys[i] result.add fin else: result.add body dbg: echo ">---------transformed-to--------->" echo renderTree(result, {renderIds})