#
#
#           The Nim Compiler
#        (c) Copyright 2015 Andreas Rumpf
#
#    See the file "copying.txt", included in this
#    distribution, for details about the copyright.
#

## This module implements lifting for type-bound operations
## (``=sink``, ``=``, ``=destroy``, ``=deepCopy``).

import modulegraphs, lineinfos, idents, ast, renderer, semdata,
  sighashes, lowerings, options, types, msgs, magicsys, tables, ccgutils

from trees import isCaseObj

type
  TLiftCtx = object
    g: ModuleGraph
    info: TLineInfo # for construction
    kind: TTypeAttachedOp
    fn: PSym
    asgnForType: PType
    recurse: bool
    addMemReset: bool    # add wasMoved() call after destructor call
    canRaise: bool
    filterDiscriminator: PSym  # we generating destructor for case branch
    c: PContext # c can be nil, then we are called from lambdalifting!
    idgen: IdGenerator

# These templates rely on an injected `c: TLiftCtx` being in scope at the
# instantiation site:
template destructor*(t: PType): PSym = getAttachedOp(c.g, t, attachedDestructor)
template assignment*(t: PType): PSym = getAttachedOp(c.g, t, attachedAsgn)
template asink*(t: PType): PSym = getAttachedOp(c.g, t, attachedSink)

proc fillBody(c: var TLiftCtx; t: PType; body, x, y: PNode)
proc produceSym(g: ModuleGraph; c: PContext; typ: PType; kind: TTypeAttachedOp;
                info: TLineInfo; idgen: IdGenerator): PSym

proc createTypeBoundOps*(g: ModuleGraph; c: PContext; orig: PType;
                         info: TLineInfo; idgen: IdGenerator)

proc at(a, i: PNode, elemType: PType): PNode =
  ## Builds the indexing expression `a[i]` with result type `elemType`.
  result = newNodeI(nkBracketExpr, a.info, 2)
  result[0] = a
  result[1] = i
  result.typ = elemType

proc destructorOverriden(g: ModuleGraph; t: PType): bool =
  ## True iff `t` has a user-supplied (overriding) `=destroy`.
  let op = getAttachedOp(g, t, attachedDestructor)
  op != nil and sfOverriden in op.flags

proc fillBodyTup(c: var TLiftCtx; t: PType; body, x, y: PNode) =
  ## Lifts the requested operation field-by-field over a tuple type.
  # NOTE(review): the loop body below was lost when this file was mangled
  # (text between '<' and '>' was stripped); it is restored from the
  # upstream compiler sources -- verify against the exact revision.
  for i in 0..<t.len:
    let lit = lowerings.newIntLit(c.g, x.info, i)
    let b = if c.kind == attachedTrace: y else: y.at(lit, t[i])
    fillBody(c, t[i], body, x.at(lit, t[i]), b)

# NOTE(review): several helper procs originally located here (at least
# `dotField`, `newAsgnStmt`, `genBuiltin`, `defaultOp`, `genAddr`,
# `genWhileLoop`, `genIf`, `destructorCall` and the big `fillBodyObj`)
# were lost in the same mangling accident. They are referenced throughout
# this module; restore them from the upstream compiler sources.

proc fillBodyObjTImpl(c: var TLiftCtx; t: PType, body, x, y: PNode) =
  ## Lifts the operation over an object type: first the inherited part
  ## (if any), then the object's own record body.
  if t.len > 0 and t[0] != nil:
    fillBody(c, skipTypes(t[0], abstractPtrs), body, x, y)
  fillBodyObj(c, t.n, body, x, y, enforceDefaultOp = false)

proc fillBodyObjT(c: var TLiftCtx; t: PType, body, x, y: PNode) =
  ## Entry point for objects; handles the subtle case-object assignment.
  var hasCase = isCaseObj(t.n)
  var obj = t
  while obj.len > 0 and obj[0] != nil:
    obj = skipTypes(obj[0], abstractPtrs)
    hasCase = hasCase or isCaseObj(obj.n)

  if hasCase and c.kind in {attachedAsgn, attachedDeepCopy}:
    # assignment for case objects is complex, we do:
    # =destroy(dest)
    # wasMoved(dest)
    # for every field:
    #   `=` dest.field, src.field
    # ^ this is what we used to do, but for 'result = result.sons[0]' it
    # destroys 'result' too early.
    # So this is what we really need to do:
    # let blob {.cursor.} = dest # remembers the old dest.kind
    # wasMoved(dest)
    # dest.kind = src.kind
    # for every field (dependent on dest.kind):
    #   `=` dest.field, src.field
    # =destroy(blob)
    var dummy = newSym(skTemp, getIdent(c.g.cache, lowerings.genPrefix),
                       nextSymId c.idgen, c.fn, c.info)
    dummy.typ = y.typ
    if ccgIntroducedPtr(c.g.config, dummy, y.typ):
      # Because of potential aliasing when the src param is passed by ref, we
      # need to check for equality here, because the wasMoved(dest) call would
      # zero out src, if dest aliases src.
      var cond = newTree(nkCall,
        newSymNode(c.g.getSysMagic(c.info, "==", mEqRef)),
        newTreeIT(nkAddr, c.info, makePtrType(c.fn, x.typ, c.idgen), x),
        newTreeIT(nkAddr, c.info, makePtrType(c.fn, y.typ, c.idgen), y))
      cond.typ = getSysType(c.g, x.info, tyBool)
      body.add genIf(c, cond, newTreeI(nkReturnStmt, c.info, newNodeI(nkEmpty, c.info)))
    var temp = newSym(skTemp, getIdent(c.g.cache, lowerings.genPrefix),
                      nextSymId c.idgen, c.fn, c.info)
    temp.typ = x.typ
    incl(temp.flags, sfFromGeneric)
    var v = newNodeI(nkVarSection, c.info)
    let blob = newSymNode(temp)
    v.addVar(blob, x)
    body.add v

    #body.add newAsgnStmt(blob, x)

    var wasMovedCall = newNodeI(nkCall, c.info)
    wasMovedCall.add(newSymNode(createMagic(c.g, c.idgen, "wasMoved", mWasMoved)))
    wasMovedCall.add x # mWasMoved does not take the address
    body.add wasMovedCall

    fillBodyObjTImpl(c, t, body, x, y)
    when false:
      # does not work yet due to phase-ordering problems:
      assert t.destructor != nil
      body.add destructorCall(c.g, t.destructor, blob)
    let prevKind = c.kind
    c.kind = attachedDestructor
    fillBodyObjTImpl(c, t, body, blob, y)
    c.kind = prevKind
  else:
    fillBodyObjTImpl(c, t, body, x, y)

proc boolLit*(g: ModuleGraph; info: TLineInfo; value: bool): PNode =
  ## Creates a bool literal node of the system `bool` type.
  result = newIntLit(g, info, ord value)
  result.typ = getSysType(g, info, tyBool)

proc getCycleParam(c: TLiftCtx): PNode =
  ## Returns the hidden 'cyclic' parameter of the current `=copy` hook,
  ## or a `true` literal if the hook does not carry one.
  assert c.kind == attachedAsgn
  if c.fn.typ.len == 4:
    result = c.fn.typ.n.lastSon
    assert result.kind == nkSym
    assert result.sym.name.s == "cyclic"
  else:
    result = boolLit(c.g, c.info, true)

proc newHookCall(c: var TLiftCtx; op: PSym; x, y: PNode): PNode =
  ## Builds a call to the type-bound operator `op` with dest `x` and
  ## optional src `y`; records whether the hook can raise.
  #if sfError in op.flags:
  #  localError(c.config, x.info, "usage of '$1' is a user-defined error" % op.name.s)
  result = newNodeI(nkCall, x.info)
  result.add newSymNode(op)
  if sfNeverRaises notin op.flags:
    c.canRaise = true
  if op.typ.sons[1].kind == tyVar:
    result.add genAddr(c, x)
  else:
    result.add x
  if y != nil:
    result.add y
  if op.typ.len == 4:
    assert y != nil
    if c.fn.typ.len == 4:
      result.add getCycleParam(c)
    else:
      # assume the worst: A cycle is created:
      result.add boolLit(c.g, y.info, true)

proc newOpCall(c: var TLiftCtx; op: PSym; x: PNode): PNode =
  ## Builds a unary call `op(x)` with `op`'s result type.
  result = newNodeIT(nkCall, x.info, op.typ[0])
  result.add(newSymNode(op))
  result.add x
  if sfNeverRaises notin op.flags:
    c.canRaise = true

proc newDeepCopyCall(c: var TLiftCtx; op: PSym; x, y: PNode): PNode =
  ## Builds `x = deepCopyOp(y)`.
  result = newAsgnStmt(x, newOpCall(c, op, y))

proc usesBuiltinArc(t: PType): bool =
  ## True iff `t` contains GC'ed memory anywhere in its structure.
  proc wrap(t: PType): bool {.nimcall.} = ast.isGCedMem(t)
  result = types.searchTypeFor(t, wrap)

proc useNoGc(c: TLiftCtx; t: PType): bool {.inline.} =
  result = optSeqDestructors in c.g.config.globalOptions and
    ({tfHasGCedMem, tfHasOwned} * t.flags != {} or usesBuiltinArc(t))

proc requiresDestructor(c: TLiftCtx; t: PType): bool {.inline.} =
  result = optSeqDestructors in c.g.config.globalOptions and
           containsGarbageCollectedRef(t)

proc instantiateGeneric(c: var TLiftCtx; op: PSym; t, typeInst: PType): PSym =
  ## Instantiates a generic type-bound operator for the concrete `typeInst`;
  ## reports an error (and returns nil) when no sem context is available.
  if c.c != nil and typeInst != nil:
    result = c.c.instTypeBoundOp(c.c, op, typeInst, c.info, attachedAsgn, 1)
  else:
    localError(c.g.config, c.info,
      "cannot generate destructor for generic type: " & typeToString(t))
    result = nil

proc considerAsgnOrSink(c: var TLiftCtx; t: PType; body, x, y: PNode;
                        field: var PSym): bool =
  ## Emits a call to an existing (or freshly produced) `=copy`/`=sink`/`=trace`
  ## hook for `t` if one is required; returns false when the caller should
  ## fall back to structural lifting.
  if optSeqDestructors in c.g.config.globalOptions:
    var op = field
    let destructorOverriden = destructorOverriden(c.g, t)
    if op != nil and op != c.fn and
        (sfOverriden in op.flags or destructorOverriden):
      if sfError in op.flags:
        incl c.fn.flags, sfError
      #else:
      #  markUsed(c.g.config, c.info, op, c.g.usageSym)
      onUse(c.info, op)
      body.add newHookCall(c, op, x, y)
      result = true
    elif op == nil and destructorOverriden:
      op = produceSym(c.g, c.c, t, c.kind, c.info, c.idgen)
      body.add newHookCall(c, op, x, y)
      result = true
  elif tfHasAsgn in t.flags:
    var op: PSym
    if sameType(t, c.asgnForType):
      # generate recursive call:
      if c.recurse:
        op = c.fn
      else:
        c.recurse = true
        return false
    else:
      op = field
      if op == nil:
        op = produceSym(c.g, c.c, t, c.kind, c.info, c.idgen)
    if sfError in op.flags:
      incl c.fn.flags, sfError
    #else:
    #  markUsed(c.g.config, c.info, op, c.g.usageSym)
    onUse(c.info, op)
    # We also now do generic instantiations in the destructor lifting pass:
    if op.ast.isGenericRoutine:
      op = instantiateGeneric(c, op, t, t.typeInst)
      field = op
      #echo "trying to use ", op.ast
      #echo "for ", op.name.s, " "
      #debug(t)
      #return false
    assert op.ast[genericParamsPos].kind == nkEmpty
    body.add newHookCall(c, op, x, y)
    result = true

proc addDestructorCall(c: var TLiftCtx; orig: PType; body, x: PNode) =
  ## Appends a `=destroy(x)` call to `body`, producing or instantiating the
  ## destructor on demand.
  let t = orig.skipTypes(abstractInst - {tyDistinct})
  var op = t.destructor

  if op != nil and sfOverriden in op.flags:
    if op.ast.isGenericRoutine:
      # patch generic destructor:
      op = instantiateGeneric(c, op, t, t.typeInst)
      setAttachedOp(c.g, c.idgen.module, t, attachedDestructor, op)

  if op == nil and (useNoGc(c, t) or requiresDestructor(c, t)):
    op = produceSym(c.g, c.c, t, attachedDestructor, c.info, c.idgen)
    doAssert op != nil
    doAssert op == t.destructor

  if op != nil:
    #markUsed(c.g.config, c.info, op, c.g.usageSym)
    onUse(c.info, op)
    body.add destructorCall(c, op, x)
  elif useNoGc(c, t):
    internalError(c.g.config, c.info,
      "type-bound operator could not be resolved")

proc considerUserDefinedOp(c: var TLiftCtx; t: PType; body, x, y: PNode): bool =
  ## Uses a user-defined hook for the current operation when one exists;
  ## returns true iff a call was emitted.
  case c.kind
  of attachedDestructor:
    var op = t.destructor
    if op != nil and sfOverriden in op.flags:
      if op.ast.isGenericRoutine:
        # patch generic destructor:
        op = instantiateGeneric(c, op, t, t.typeInst)
        setAttachedOp(c.g, c.idgen.module, t, attachedDestructor, op)
      #markUsed(c.g.config, c.info, op, c.g.usageSym)
      onUse(c.info, op)
      body.add destructorCall(c, op, x)
      result = true
    #result = addDestructorCall(c, t, body, x)
  of attachedAsgn, attachedSink, attachedTrace:
    var op = getAttachedOp(c.g, t, c.kind)
    if op != nil and sfOverriden in op.flags:
      if op.ast.isGenericRoutine:
        # patch generic =trace:
        op = instantiateGeneric(c, op, t, t.typeInst)
        setAttachedOp(c.g, c.idgen.module, t, c.kind, op)
    result = considerAsgnOrSink(c, t, body, x, y, op)
    if op != nil:
      setAttachedOp(c.g, c.idgen.module, t, c.kind, op)
  of attachedDeepCopy:
    let op = getAttachedOp(c.g, t, attachedDeepCopy)
    if op != nil:
      #markUsed(c.g.config, c.info, op, c.g.usageSym)
      onUse(c.info, op)
      body.add newDeepCopyCall(c, op, x, y)
      result = true

proc declareCounter(c: var TLiftCtx; body: PNode; first: BiggestInt): PNode =
  ## Declares an `int` loop counter initialized to `first`; returns its sym node.
  var temp = newSym(skTemp, getIdent(c.g.cache, lowerings.genPrefix),
                    nextSymId(c.idgen), c.fn, c.info)
  temp.typ = getSysType(c.g, body.info, tyInt)
  incl(temp.flags, sfFromGeneric)

  var v = newNodeI(nkVarSection, c.info)
  result = newSymNode(temp)
  v.addVar(result, lowerings.newIntLit(c.g, body.info, first))
  body.add v

proc declareTempOf(c: var TLiftCtx; body: PNode; value: PNode): PNode =
  ## Declares a temporary initialized to `value`; returns its sym node.
  var temp = newSym(skTemp, getIdent(c.g.cache, lowerings.genPrefix),
                    nextSymId(c.idgen), c.fn, c.info)
  temp.typ = value.typ
  incl(temp.flags, sfFromGeneric)

  var v = newNodeI(nkVarSection, c.info)
  result = newSymNode(temp)
  v.addVar(result, value)
  body.add v

proc addIncStmt(c: var TLiftCtx; body, i: PNode) =
  ## Appends `inc(i, 1)` to `body`.
  let incCall = genBuiltin(c, mInc, "inc", i)
  incCall.add lowerings.newIntLit(c.g, c.info, 1)
  body.add incCall

proc newSeqCall(c: var TLiftCtx; x, y: PNode): PNode =
  ## Builds `newSeq(x, y.len)`.
  # don't call genAddr(c, x) here:
  result = genBuiltin(c, mNewSeq, "newSeq", x)
  let lenCall = genBuiltin(c, mLengthSeq, "len", y)
  lenCall.typ = getSysType(c.g, x.info, tyInt)
  result.add lenCall

proc setLenStrCall(c: var TLiftCtx; x, y: PNode): PNode =
  ## Builds `setLen(x, y.len)` for strings.
  let lenCall = genBuiltin(c, mLengthStr, "len", y)
  lenCall.typ = getSysType(c.g, x.info, tyInt)
  result = genBuiltin(c, mSetLengthStr, "setLen", x) # genAddr(g, x))
  result.add lenCall

proc setLenSeqCall(c: var TLiftCtx; t: PType; x, y: PNode): PNode =
  ## Builds `setLen(x, y.len)` for seqs, instantiating the generic magic.
  let lenCall = genBuiltin(c, mLengthSeq, "len", y)
  lenCall.typ = getSysType(c.g, x.info, tyInt)
  var op = getSysMagic(c.g, x.info, "setLen", mSetLengthSeq)
  op = instantiateGeneric(c, op, t, t)
  result = newTree(nkCall, newSymNode(op, x.info), x, lenCall)
proc forallElements(c: var TLiftCtx; t: PType; body, x, y: PNode) =
  ## Generates `while`-loop code applying the lifted operation to every
  ## element of an array/seq; drops the loop again if nothing was generated.
  let counterIdx = body.len
  let i = declareCounter(c, body, toInt64(firstOrd(c.g.config, t)))
  let whileLoop = genWhileLoop(c, i, x)
  let elemType = t.lastSon
  let b = if c.kind == attachedTrace: y else: y.at(i, elemType)
  fillBody(c, elemType, whileLoop[1], x.at(i, elemType), b)
  if whileLoop[1].len > 0:
    addIncStmt(c, whileLoop[1], i)
    body.add whileLoop
  else:
    # nothing to do per element: undo the counter declaration
    body.sons.setLen counterIdx

proc fillSeqOp(c: var TLiftCtx; t: PType; body, x, y: PNode) =
  ## Lifts the operation for seq types under the old (non-destructor) runtime.
  case c.kind
  of attachedAsgn, attachedDeepCopy:
    # we generate:
    # setLen(dest, y.len)
    # var i = 0
    # while i < y.len: dest[i] = y[i]; inc(i)
    # This is usually more efficient than a destroy/create pair.
    body.add setLenSeqCall(c, t, x, y)
    forallElements(c, t, body, x, y)
  of attachedSink:
    let moveCall = genBuiltin(c, mMove, "move", x)
    moveCall.add y
    doAssert t.destructor != nil
    moveCall.add destructorCall(c, t.destructor, x)
    body.add moveCall
  of attachedDestructor:
    # destroy all elements:
    forallElements(c, t, body, x, y)
    body.add genBuiltin(c, mDestroy, "destroy", x)
  of attachedTrace:
    if canFormAcycle(t.elemType):
      # follow all elements:
      forallElements(c, t, body, x, y)

proc useSeqOrStrOp(c: var TLiftCtx; t: PType; body, x, y: PNode) =
  ## Emits a call to the canonical seq/string hook, producing it on demand.
  createTypeBoundOps(c.g, c.c, t, body.info, c.idgen)
  # recursions are tricky, so we might need to forward the generated
  # operation here:
  var t = t
  if t.assignment == nil or t.destructor == nil:
    let h = sighashes.hashType(t, {CoType, CoConsiderOwned, CoDistinct})
    let canon = c.g.canonTypes.getOrDefault(h)
    if canon != nil: t = canon

  case c.kind
  of attachedAsgn, attachedDeepCopy:
    # XXX: replace these with assertions.
    if t.assignment == nil:
      return # protect from recursion
    body.add newHookCall(c, t.assignment, x, y)
  of attachedSink:
    # we always inline the move for better performance:
    let moveCall = genBuiltin(c, mMove, "move", x)
    moveCall.add y
    doAssert t.destructor != nil
    moveCall.add destructorCall(c, t.destructor, x)
    body.add moveCall
    # alternatively we could do this:
    when false:
      doAssert t.asink != nil
      body.add newHookCall(c, t.asink, x, y)
  of attachedDestructor:
    doAssert t.destructor != nil
    body.add destructorCall(c, t.destructor, x)
  of attachedTrace:
    if t.kind != tyString and canFormAcycle(t.elemType):
      let op = getAttachedOp(c.g, t, c.kind)
      if op == nil:
        return # protect from recursion
      body.add newHookCall(c, op, x, y)

proc fillStrOp(c: var TLiftCtx; t: PType; body, x, y: PNode) =
  ## Lifts the operation for the `string` type (v2 string runtime).
  case c.kind
  of attachedAsgn, attachedDeepCopy:
    body.add callCodegenProc(c.g, "nimAsgnStrV2", c.info, genAddr(c, x), y)
  of attachedSink:
    let moveCall = genBuiltin(c, mMove, "move", x)
    moveCall.add y
    doAssert t.destructor != nil
    moveCall.add destructorCall(c, t.destructor, x)
    body.add moveCall
  of attachedDestructor:
    body.add genBuiltin(c, mDestroy, "destroy", x)
  of attachedTrace:
    discard "strings are atomic and have no inner elements that are to trace"

proc cyclicType*(t: PType): bool =
  ## Can a value of type `t` participate in a reference cycle?
  case t.kind
  of tyRef: result = types.canFormAcycle(t.lastSon)
  of tyProc: result = t.callConv == ccClosure
  else: result = false

proc atomicRefOp(c: var TLiftCtx; t: PType; body, x, y: PNode) =
  #[ bug #15753 is really subtle. Usually the classical write barrier for
  reference counting looks like this::

    incRef source  # increment first; this takes care of self-assignments1
    decRef dest
    dest[] = source

  However, 'decRef dest' might trigger a cycle collection and then the
  collector traverses the graph. It is crucial that when it follows the
  pointers the assignment 'dest[] = source' already happened so that we
  don't do trial deletion on a wrong graph -- this causes premature freeing
  of objects! The correct barrier looks like this::

    let tmp = dest
    incRef source
    dest[] = source
    decRef tmp

  ]#
  var actions = newNodeI(nkStmtList, c.info)
  let elemType = t.lastSon

  createTypeBoundOps(c.g, c.c, elemType, c.info, c.idgen)
  let isCyclic = c.g.config.selectedGC == gcOrc and types.canFormAcycle(elemType)

  let tmp =
    if isCyclic and c.kind in {attachedAsgn, attachedSink}:
      declareTempOf(c, body, x)
    else:
      x

  if isFinal(elemType):
    addDestructorCall(c, elemType, actions, genDeref(tmp, nkDerefExpr))
    var alignOf = genBuiltin(c, mAlignOf, "alignof", newNodeIT(nkType, c.info, elemType))
    alignOf.typ = getSysType(c.g, c.info, tyInt)
    actions.add callCodegenProc(c.g, "nimRawDispose", c.info, tmp, alignOf)
  else:
    addDestructorCall(c, elemType, newNodeI(nkStmtList, c.info),
                      genDeref(tmp, nkDerefExpr))
    actions.add callCodegenProc(c.g, "nimDestroyAndDispose", c.info, tmp)

  var cond: PNode
  if isCyclic:
    if isFinal(elemType):
      let typInfo = genBuiltin(c, mGetTypeInfoV2, "getTypeInfoV2",
                               newNodeIT(nkType, x.info, elemType))
      typInfo.typ = getSysType(c.g, c.info, tyPointer)
      cond = callCodegenProc(c.g, "nimDecRefIsLastCyclicStatic", c.info, tmp, typInfo)
    else:
      cond = callCodegenProc(c.g, "nimDecRefIsLastCyclicDyn", c.info, tmp)
  else:
    cond = callCodegenProc(c.g, "nimDecRefIsLast", c.info, x)
  cond.typ = getSysType(c.g, x.info, tyBool)

  case c.kind
  of attachedSink:
    if isCyclic:
      body.add newAsgnStmt(x, y)
      body.add genIf(c, cond, actions)
    else:
      body.add genIf(c, cond, actions)
      body.add newAsgnStmt(x, y)
  of attachedAsgn:
    if isCyclic:
      body.add genIf(c, y, callCodegenProc(c.g, "nimIncRefCyclic", c.info, y,
                                           getCycleParam(c)))
      body.add newAsgnStmt(x, y)
      body.add genIf(c, cond, actions)
    else:
      body.add genIf(c, y, callCodegenProc(c.g, "nimIncRef", c.info, y))
      body.add genIf(c, cond, actions)
      body.add newAsgnStmt(x, y)
  of attachedDestructor:
    body.add genIf(c, cond, actions)
  of attachedDeepCopy: assert(false, "cannot happen")
  of attachedTrace:
    if isCyclic:
      if isFinal(elemType):
        let typInfo = genBuiltin(c, mGetTypeInfoV2, "getTypeInfoV2",
                                 newNodeIT(nkType, x.info, elemType))
        typInfo.typ = getSysType(c.g, c.info, tyPointer)
        body.add callCodegenProc(c.g, "nimTraceRef", c.info,
                                 genAddrOf(x, c.idgen), typInfo, y)
      else:
        # If the ref is polymorphic we have to account for this
        body.add callCodegenProc(c.g, "nimTraceRefDyn", c.info,
                                 genAddrOf(x, c.idgen), y)
    #echo "can follow ", elemType, " static ", isFinal(elemType)

proc atomicClosureOp(c: var TLiftCtx; t: PType; body, x, y: PNode) =
  ## Closures are really like refs except they always use a virtual destructor
  ## and we need to do the refcounting only on the ref field which we call
  ## 'xenv':
  let xenv = genBuiltin(c, mAccessEnv, "accessEnv", x)
  xenv.typ = getSysType(c.g, c.info, tyPointer)

  let isCyclic = c.g.config.selectedGC == gcOrc
  let tmp =
    if isCyclic and c.kind in {attachedAsgn, attachedSink}:
      declareTempOf(c, body, xenv)
    else:
      xenv

  var actions = newNodeI(nkStmtList, c.info)
  actions.add callCodegenProc(c.g, "nimDestroyAndDispose", c.info, tmp)

  let decRefProc =
    if isCyclic: "nimDecRefIsLastCyclicDyn"
    else: "nimDecRefIsLast"
  let cond = callCodegenProc(c.g, decRefProc, c.info, tmp)
  cond.typ = getSysType(c.g, x.info, tyBool)

  case c.kind
  of attachedSink:
    if isCyclic:
      body.add newAsgnStmt(x, y)
      body.add genIf(c, cond, actions)
    else:
      body.add genIf(c, cond, actions)
      body.add newAsgnStmt(x, y)
  of attachedAsgn:
    let yenv = genBuiltin(c, mAccessEnv, "accessEnv", y)
    yenv.typ = getSysType(c.g, c.info, tyPointer)
    if isCyclic:
      body.add genIf(c, yenv, callCodegenProc(c.g, "nimIncRefCyclic", c.info,
                                              yenv, getCycleParam(c)))
      body.add newAsgnStmt(x, y)
      body.add genIf(c, cond, actions)
    else:
      body.add genIf(c, yenv, callCodegenProc(c.g, "nimIncRef", c.info, yenv))
      body.add genIf(c, cond, actions)
      body.add newAsgnStmt(x, y)
  of attachedDestructor:
    body.add genIf(c, cond, actions)
  of attachedDeepCopy: assert(false, "cannot happen")
  of attachedTrace:
    body.add callCodegenProc(c.g, "nimTraceRefDyn", c.info,
                             genAddrOf(xenv, c.idgen), y)

proc weakrefOp(c: var TLiftCtx; t: PType; body, x, y: PNode) =
  ## Lifting for `ref` under the 'owned refs' model (non-owning side).
  case c.kind
  of attachedSink:
    # we 'nil' y out afterwards so we *need* to take over its reference
    # count value:
    body.add genIf(c, x, callCodegenProc(c.g, "nimDecWeakRef", c.info, x))
    body.add newAsgnStmt(x, y)
  of attachedAsgn:
    body.add genIf(c, y, callCodegenProc(c.g, "nimIncRef", c.info, y))
    body.add genIf(c, x, callCodegenProc(c.g, "nimDecWeakRef", c.info, x))
    body.add newAsgnStmt(x, y)
  of attachedDestructor:
    # it's better to prepend the destruction of weak refs in order to
    # prevent wrong "dangling refs exist" problems:
    var actions = newNodeI(nkStmtList, c.info)
    actions.add callCodegenProc(c.g, "nimDecWeakRef", c.info, x)
    let des = genIf(c, x, actions)
    if body.len == 0: body.add des
    else: body.sons.insert(des, 0)
  of attachedDeepCopy: assert(false, "cannot happen")
  of attachedTrace: discard

proc ownedRefOp(c: var TLiftCtx; t: PType; body, x, y: PNode) =
  ## Lifting for `owned ref` (the owning side destroys and disposes).
  var actions = newNodeI(nkStmtList, c.info)
  let elemType = t.lastSon

  #fillBody(c, elemType, actions, genDeref(x), genDeref(y))
  #var disposeCall = genBuiltin(c, mDispose, "dispose", x)

  if isFinal(elemType):
    addDestructorCall(c, elemType, actions, genDeref(x, nkDerefExpr))
    var alignOf = genBuiltin(c, mAlignOf, "alignof", newNodeIT(nkType, c.info, elemType))
    alignOf.typ = getSysType(c.g, c.info, tyInt)
    actions.add callCodegenProc(c.g, "nimRawDispose", c.info, x, alignOf)
  else:
    addDestructorCall(c, elemType, newNodeI(nkStmtList, c.info),
                      genDeref(x, nkDerefExpr))
    actions.add callCodegenProc(c.g, "nimDestroyAndDispose", c.info, x)

  case c.kind
  of attachedSink, attachedAsgn:
    body.add genIf(c, x, actions)
    body.add newAsgnStmt(x, y)
  of attachedDestructor:
    body.add genIf(c, x, actions)
  of attachedDeepCopy: assert(false, "cannot happen")
  of attachedTrace: discard

proc closureOp(c: var TLiftCtx; t: PType; body, x, y: PNode) =
  ## Lifting for closures outside the ARC/ORC fast path.
  if c.kind == attachedDeepCopy:
    # a big problem is that we don't know the environment's type here, so we
    # have to go through some indirection; we delegate this to the codegen:
    let call = newNodeI(nkCall, c.info, 2)
    call.typ = t
    call[0] = newSymNode(createMagic(c.g, c.idgen, "deepCopy", mDeepCopy))
    call[1] = y
    body.add newAsgnStmt(x, call)
  elif (optOwnedRefs in c.g.config.globalOptions and
      optRefCheck in c.g.config.options) or
      c.g.config.selectedGC in {gcArc, gcOrc}:
    let xx = genBuiltin(c, mAccessEnv, "accessEnv", x)
    xx.typ = getSysType(c.g, c.info, tyPointer)
    case c.kind
    of attachedSink:
      # we 'nil' y out afterwards so we *need* to take over its reference
      # count value:
      body.add genIf(c, xx, callCodegenProc(c.g, "nimDecWeakRef", c.info, xx))
      body.add newAsgnStmt(x, y)
    of attachedAsgn:
      let yy = genBuiltin(c, mAccessEnv, "accessEnv", y)
      yy.typ = getSysType(c.g, c.info, tyPointer)
      body.add genIf(c, yy, callCodegenProc(c.g, "nimIncRef", c.info, yy))
      body.add genIf(c, xx, callCodegenProc(c.g, "nimDecWeakRef", c.info, xx))
      body.add newAsgnStmt(x, y)
    of attachedDestructor:
      let des = genIf(c, xx, callCodegenProc(c.g, "nimDecWeakRef", c.info, xx))
      if body.len == 0: body.add des
      else: body.sons.insert(des, 0)
    of attachedDeepCopy: assert(false, "cannot happen")
    of attachedTrace: discard

proc ownedClosureOp(c: var TLiftCtx; t: PType; body, x, y: PNode) =
  ## Lifting for `owned` closures.
  let xx = genBuiltin(c, mAccessEnv, "accessEnv", x)
  xx.typ = getSysType(c.g, c.info, tyPointer)
  var actions = newNodeI(nkStmtList, c.info)
  #discard addDestructorCall(c, elemType, newNodeI(nkStmtList, c.info), genDeref(xx))
  actions.add callCodegenProc(c.g, "nimDestroyAndDispose", c.info, xx)
  case c.kind
  of attachedSink, attachedAsgn:
    body.add genIf(c, xx, actions)
    body.add newAsgnStmt(x, y)
  of attachedDestructor:
    body.add genIf(c, xx, actions)
  of attachedDeepCopy: assert(false, "cannot happen")
  of attachedTrace: discard

proc fillBody(c: var TLiftCtx; t: PType; body, x, y: PNode) =
  ## Central dispatcher: generates the body of the requested type-bound
  ## operation for type `t`, appending statements to `body`.
  case t.kind
  of tyNone, tyEmpty, tyVoid: discard
  of tyPointer, tySet, tyBool, tyChar, tyEnum, tyInt..tyUInt64, tyCstring,
      tyPtr, tyUncheckedArray, tyVar, tyLent:
    defaultOp(c, t, body, x, y)
  of tyRef:
    if c.g.config.selectedGC in {gcArc, gcOrc}:
      atomicRefOp(c, t, body, x, y)
    elif (optOwnedRefs in c.g.config.globalOptions and
        optRefCheck in c.g.config.options):
      weakrefOp(c, t, body, x, y)
    else:
      defaultOp(c, t, body, x, y)
  of tyProc:
    if t.callConv == ccClosure:
      if c.g.config.selectedGC in {gcArc, gcOrc}:
        atomicClosureOp(c, t, body, x, y)
      else:
        closureOp(c, t, body, x, y)
    else:
      defaultOp(c, t, body, x, y)
  of tyOwned:
    let base = t.skipTypes(abstractInstOwned)
    if optOwnedRefs in c.g.config.globalOptions:
      case base.kind
      of tyRef:
        ownedRefOp(c, base, body, x, y)
        return
      of tyProc:
        if base.callConv == ccClosure:
          ownedClosureOp(c, base, body, x, y)
          return
      else: discard
    defaultOp(c, base, body, x, y)
  of tyArray:
    if tfHasAsgn in t.flags or useNoGc(c, t):
      forallElements(c, t, body, x, y)
    else:
      defaultOp(c, t, body, x, y)
  of tySequence:
    if useNoGc(c, t):
      useSeqOrStrOp(c, t, body, x, y)
    elif optSeqDestructors in c.g.config.globalOptions:
      # note that tfHasAsgn is propagated so we need the check on
      # 'selectedGC' here to determine if we have the new runtime.
      discard considerUserDefinedOp(c, t, body, x, y)
    elif tfHasAsgn in t.flags:
      if c.kind in {attachedAsgn, attachedSink, attachedDeepCopy}:
        body.add newSeqCall(c, x, y)
      forallElements(c, t, body, x, y)
    else:
      defaultOp(c, t, body, x, y)
  of tyString:
    if useNoGc(c, t):
      useSeqOrStrOp(c, t, body, x, y)
    elif tfHasAsgn in t.flags:
      discard considerUserDefinedOp(c, t, body, x, y)
    else:
      defaultOp(c, t, body, x, y)
  of tyObject:
    if not considerUserDefinedOp(c, t, body, x, y):
      if c.kind in {attachedAsgn, attachedSink} and t.sym != nil and
          sfImportc in t.sym.flags:
        body.add newAsgnStmt(x, y)
      else:
        fillBodyObjT(c, t, body, x, y)
  of tyDistinct:
    if not considerUserDefinedOp(c, t, body, x, y):
      fillBody(c, t[0], body, x, y)
  of tyTuple:
    fillBodyTup(c, t, body, x, y)
  of tyVarargs, tyOpenArray:
    if c.kind == attachedDestructor and (tfHasAsgn in t.flags or useNoGc(c, t)):
      forallElements(c, t, body, x, y)
    else:
      discard "cannot copy openArray"
  of tyFromExpr, tyProxy, tyBuiltInTypeClass, tyUserTypeClass,
     tyUserTypeClassInst, tyCompositeTypeClass, tyAnd, tyOr, tyNot, tyAnything,
     tyGenericParam, tyGenericBody, tyNil, tyUntyped, tyTyped, tyTypeDesc,
     tyGenericInvocation, tyForward, tyStatic:
    #internalError(c.g.config, c.info, "assignment requested for type: " & typeToString(t))
    discard
  of tyOrdinal, tyRange, tyInferred,
     tyGenericInst, tyAlias, tySink:
    fillBody(c, lastSon(t), body, x, y)
  of tyConcept, tyIterable: doAssert false

proc produceSymDistinctType(g: ModuleGraph; c: PContext; typ: PType;
                            kind: TTypeAttachedOp; info: TLineInfo;
                            idgen: IdGenerator): PSym =
  ## For a distinct type, reuses (and if necessary first produces) the base
  ## type's hook and attaches it to the distinct type as well.
  assert typ.kind == tyDistinct
  let baseType = typ[0]
  if getAttachedOp(g, baseType, kind) == nil:
    discard produceSym(g, c, baseType, kind, info, idgen)
  result = getAttachedOp(g, baseType, kind)
  setAttachedOp(g, idgen.module, typ, kind, result)

proc symPrototype(g: ModuleGraph; typ: PType; owner: PSym; kind: TTypeAttachedOp;
                  info: TLineInfo; idgen: IdGenerator): PSym =
  ## Creates the (empty) prototype symbol for a type-bound operator of `typ`.
  let procname = getIdent(g.cache, AttachedOpToStr[kind])
  result = newSym(skProc, procname, nextSymId(idgen), owner, info)
  let dest = newSym(skParam, getIdent(g.cache, "dest"), nextSymId(idgen), result, info)
  let src = newSym(skParam, getIdent(g.cache, if kind == attachedTrace: "env" else: "src"),
                   nextSymId(idgen), result, info)
  dest.typ = makeVarType(typ.owner, typ, idgen)
  if kind == attachedTrace:
    src.typ = getSysType(g, info, tyPointer)
  else:
    src.typ = typ

  result.typ = newProcType(info, nextTypeId(idgen), owner)
  result.typ.addParam dest
  if kind != attachedDestructor:
    result.typ.addParam src

  if kind == attachedAsgn and g.config.selectedGC == gcOrc and
      cyclicType(typ.skipTypes(abstractInst)):
    let cycleParam = newSym(skParam, getIdent(g.cache, "cyclic"),
                            nextSymId(idgen), result, info)
    cycleParam.typ = getSysType(g, info, tyBool)
    result.typ.addParam cycleParam

  var n = newNodeI(nkProcDef, info, bodyPos+1)
  # NOTE(review): the remainder of this proc was lost in the mangled source;
  # restored from the upstream compiler sources -- verify against the exact
  # revision this file was taken from.
  for i in 0..<n.len: n[i] = newNodeI(nkEmpty, info)
  n[namePos] = newSymNode(result)
  n[paramsPos] = result.typ.n
  n[bodyPos] = newNodeI(nkStmtList, info)
  result.ast = n
  incl result.flags, sfFromGeneric
  incl result.flags, sfGeneratedOp

# NOTE(review): the definitions of `produceSym`, `inst` and `isTrival`
# (referenced below and forward-declared above) were lost when this file
# was mangled; restore them from the upstream compiler sources.

proc createTypeBoundOps(g: ModuleGraph; c: PContext; orig: PType;
                        info: TLineInfo; idgen: IdGenerator) =
  ## In the semantic pass this is called in strategic places to ensure we
  ## lift assignment, destructors and moves properly; later passes depend
  ## on the attached ops being present.
  # NOTE(review): the first half of this proc body was lost in the mangled
  # source; it is restored here from the upstream compiler sources -- verify
  # against the exact revision this file was taken from.
  if orig == nil or {tfCheckedForDestructor, tfHasMeta} * orig.flags != {}: return
  incl orig.flags, tfCheckedForDestructor

  let skipped = orig.skipTypes({tyGenericInst, tyAlias, tySink})
  if isEmptyContainer(skipped) or skipped.kind == tyStatic: return

  let h = sighashes.hashType(skipped, {CoType, CoConsiderOwned, CoDistinct})
  var canon = g.canonTypes.getOrDefault(h)
  if canon == nil:
    g.canonTypes[h] = skipped
    canon = skipped

  # multiple cases are to distinguish here:
  # 1. we don't know yet if 'typ' has a nontrivial destructor.
  # 2. we have a nop destructor. --> mDestroy
  # 3. we have a lifted destructor.
  # 4. We have a custom destructor.
  # 5. We have a (custom) generic destructor.

  # we do not generate '=trace' procs if we
  # have the cycle detection disabled, saves code size.
  let lastAttached = if g.config.selectedGC == gcOrc: attachedTrace
                     else: attachedSink

  # bug #15122: We need to produce all prototypes before entering the
  # mind boggling recursion. Hacks like these imply we should rewrite
  # this module.
  var generics: array[attachedDestructor..attachedTrace, bool]
  for k in attachedDestructor..lastAttached:
    generics[k] = getAttachedOp(g, canon, k) != nil
    if not generics[k]:
      setAttachedOp(g, idgen.module, canon, k,
          symPrototype(g, canon, canon.owner, k, info, idgen))

  # we generate the destructor first so that other operators can depend on it:
  for k in attachedDestructor..lastAttached:
    if not generics[k]:
      discard produceSym(g, c, canon, k, info, idgen)
    else:
      inst(g, c, canon, k, idgen, info)
    if canon != orig:
      setAttachedOp(g, idgen.module, orig, k, getAttachedOp(g, canon, k))

  if not isTrival(getAttachedOp(g, orig, attachedDestructor)):
    #or not isTrival(orig.assignment) or
    # not isTrival(orig.sink):
    orig.flags.incl tfHasAsgn
    # ^ XXX Breaks IC!