author | Araq <rumpf_a@web.de> | 2013-12-28 01:57:19 +0100 |
---|---|---|
committer | Araq <rumpf_a@web.de> | 2013-12-28 01:57:19 +0100 |
commit | f2b9905b4e4365e0dd51a67ad90ac690f584feca (patch) | |
tree | ea47686702c4b87ca77dfb33f279667370e1b2de /compiler | |
parent | e2648fa3fb7b00b3485b8f34daf7dcdb32d53778 (diff) | |
case consistency part 9
Diffstat (limited to 'compiler')
-rw-r--r-- | compiler/pas2nim/pas2nim.nim | 2
-rw-r--r-- | compiler/pas2nim/paslex.nim | 74
-rw-r--r-- | compiler/pas2nim/pasparse.nim | 52
3 files changed, 64 insertions, 64 deletions
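
All three files are touched only to normalize identifier spelling: calls to routines get a lowercase first letter (`LLStreamOpen` → `llStreamOpen`, `Inc` → `inc`, `TokKindToStr` → `tokKindToStr`), while types and constants get an uppercase first letter (`biggestInt` → `BiggestInt`, `keywords` → `Keywords`). The sketch below illustrates why only the first letter needs normalizing; it assumes current Nim's partial style-insensitivity rule and uses a hypothetical standalone proc, not the compiler's real `llstream` module:

```nim
# Nim compares identifiers case-insensitively except for the first
# character, and ignores underscores after it ("style insensitivity").
# `llStreamOpen` here is a hypothetical stand-in, not the compiler's proc.
proc llStreamOpen(filename: string): string =
  result = "opened " & filename

echo llStreamOpen("input.pas")    # canonical spelling
echo llstream_open("input.pas")   # same proc: later characters match loosely
# echo LLStreamOpen("input.pas")  # error: the first letter is case-sensitive
```

Writing every call with one canonical spelling keeps the sources consistent under this rule, which is what the "case consistency" series does mechanically across the compiler.
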
diff --git a/compiler/pas2nim/pas2nim.nim b/compiler/pas2nim/pas2nim.nim
index ce5eb5c1a..d10028167 100644
--- a/compiler/pas2nim/pas2nim.nim
+++ b/compiler/pas2nim/pas2nim.nim
@@ -26,7 +26,7 @@ Options:
 """

 proc main(infile, outfile: string, flags: set[TParserFlag]) =
-  var stream = LLStreamOpen(infile, fmRead)
+  var stream = llStreamOpen(infile, fmRead)
   if stream == nil: rawMessage(errCannotOpenFile, infile)
   var p: TParser
   openParser(p, infile, stream, flags)
diff --git a/compiler/pas2nim/paslex.nim b/compiler/pas2nim/paslex.nim
index 94e664832..67473e71f 100644
--- a/compiler/pas2nim/paslex.nim
+++ b/compiler/pas2nim/paslex.nim
@@ -78,7 +78,7 @@ type

 proc getTok*(L: var TLexer, tok: var TToken)
-proc PrintTok*(tok: TToken)
+proc printTok*(tok: TToken)
 proc `$`*(tok: TToken): string

 # implementation

@@ -109,17 +109,17 @@ proc getLineInfo*(L: TLexer): TLineInfo =
   result = newLineInfo(L.filename, L.linenumber, getColNumber(L, L.bufpos))

 proc lexMessage*(L: TLexer, msg: TMsgKind, arg = "") =
-  msgs.GlobalError(getLineInfo(L), msg, arg)
+  msgs.globalError(getLineInfo(L), msg, arg)

 proc lexMessagePos(L: var TLexer, msg: TMsgKind, pos: int, arg = "") =
   var info = newLineInfo(L.filename, L.linenumber, pos - L.lineStart)
-  msgs.GlobalError(info, msg, arg)
+  msgs.globalError(info, msg, arg)

-proc TokKindToStr*(k: TTokKind): string =
+proc tokKindToStr*(k: TTokKind): string =
   case k
   of pxEof: result = "[EOF]"
   of firstKeyword..lastKeyword:
-    result = keywords[ord(k)-ord(firstKeyword)]
+    result = Keywords[ord(k)-ord(firstKeyword)]
   of pxInvalid, pxComment, pxStrLit: result = "string literal"
   of pxCommand: result = "{@"
   of pxAmp: result = "{&"
@@ -160,9 +160,9 @@ proc `$`(tok: TToken): string =
   of pxSymbol: result = tok.ident.s
   of pxIntLit, pxInt64Lit: result = $tok.iNumber
   of pxFloatLit: result = $tok.fNumber
-  else: result = TokKindToStr(tok.xkind)
+  else: result = tokKindToStr(tok.xkind)

-proc PrintTok(tok: TToken) =
+proc printTok(tok: TToken) =
   writeln(stdout, $tok)

 proc setKeyword(L: var TLexer, tok: var TToken) =
@@ -177,12 +177,12 @@ proc matchUnderscoreChars(L: var TLexer, tok: var TToken, chars: TCharSet) =
   while true:
     if buf[pos] in chars:
       add(tok.literal, buf[pos])
-      Inc(pos)
+      inc(pos)
     else:
       break
     if buf[pos] == '_':
       add(tok.literal, '_')
-      Inc(pos)
+      inc(pos)
   L.bufPos = pos

 proc isFloatLiteral(s: string): bool =
@@ -199,7 +199,7 @@ proc getNumber2(L: var TLexer, tok: var TToken) =
     inc(L.bufpos)
     return
   tok.base = base2
-  var xi: biggestInt = 0
+  var xi: BiggestInt = 0
   var bits = 0
   while true:
     case L.buf[pos]
@@ -221,7 +221,7 @@ proc getNumber16(L: var TLexer, tok: var TToken) =
   var pos = L.bufpos + 1 # skip $
   tok.base = base16
-  var xi: biggestInt = 0
+  var xi: BiggestInt = 0
   var bits = 0
   while true:
     case L.buf[pos]
@@ -261,7 +261,7 @@ proc getNumber10(L: var TLexer, tok: var TToken) =
       tok.fnumber = parseFloat(tok.literal)
       tok.xkind = pxFloatLit
     else:
-      tok.iNumber = ParseInt(tok.literal)
+      tok.iNumber = parseInt(tok.literal)
       if (tok.iNumber < low(int32)) or (tok.iNumber > high(int32)):
         tok.xkind = pxInt64Lit
       else:
@@ -271,10 +271,10 @@
   except EOverflow:
     lexMessage(L, errNumberOutOfRange, tok.literal)

-proc HandleCRLF(L: var TLexer, pos: int): int =
+proc handleCRLF(L: var TLexer, pos: int): int =
   case L.buf[pos]
-  of CR: result = nimlexbase.HandleCR(L, pos)
-  of LF: result = nimlexbase.HandleLF(L, pos)
+  of CR: result = nimlexbase.handleCR(L, pos)
+  of LF: result = nimlexbase.handleLF(L, pos)
   else: result = pos

 proc getString(L: var TLexer, tok: var TToken) =
@@ -319,7 +319,7 @@ proc getString(L: var TLexer, tok: var TToken) =
           xi = (xi * 10) + (ord(buf[pos]) - ord('0'))
           inc(pos)
       else: lexMessage(L, errInvalidCharacterConstant)
-      if (xi <= 255): add(tok.literal, Chr(xi))
+      if (xi <= 255): add(tok.literal, chr(xi))
       else: lexMessage(L, errInvalidCharacterConstant)
     else:
       break
@@ -334,17 +334,17 @@ proc getSymbol(L: var TLexer, tok: var TToken) =
     var c = buf[pos]
     case c
     of 'a'..'z', '0'..'9', '\x80'..'\xFF':
-      h = h +% Ord(c)
+      h = h +% ord(c)
       h = h +% h shl 10
       h = h xor (h shr 6)
     of 'A'..'Z':
       c = chr(ord(c) + (ord('a') - ord('A'))) # toLower()
-      h = h +% Ord(c)
+      h = h +% ord(c)
       h = h +% h shl 10
       h = h xor (h shr 6)
     of '_': nil
     else: break
-    Inc(pos)
+    inc(pos)
   h = h +% h shl 3
   h = h xor (h shr 11)
   h = h +% h shl 15
@@ -385,7 +385,7 @@ proc scanCurlyComment(L: var TLexer, tok: var TToken) =
   while true:
     case buf[pos]
     of CR, LF:
-      pos = HandleCRLF(L, pos)
+      pos = handleCRLF(L, pos)
       buf = L.buf
       add(tok.literal, "\n#")
     of '}':
@@ -405,7 +405,7 @@ proc scanStarComment(L: var TLexer, tok: var TToken) =
   while true:
     case buf[pos]
     of CR, LF:
-      pos = HandleCRLF(L, pos)
+      pos = handleCRLF(L, pos)
       buf = L.buf
       add(tok.literal, "\n#")
     of '*':
@@ -428,9 +428,9 @@ proc skip(L: var TLexer, tok: var TToken) =
   while true:
     case buf[pos]
     of ' ', Tabulator:
-      Inc(pos) # newline is special:
+      inc(pos) # newline is special:
     of CR, LF:
-      pos = HandleCRLF(L, pos)
+      pos = handleCRLF(L, pos)
       buf = L.buf
     else:
       break # EndOfFile also leaves the loop
@@ -449,7 +449,7 @@ proc getTok(L: var TLexer, tok: var TToken) =
   case c
   of ';':
     tok.xkind = pxSemicolon
-    Inc(L.bufpos)
+    inc(L.bufpos)
   of '/':
     if L.buf[L.bufpos + 1] == '/':
       scanLineComment(L, tok)
@@ -458,12 +458,12 @@
       inc(L.bufpos)
   of ',':
     tok.xkind = pxComma
-    Inc(L.bufpos)
+    inc(L.bufpos)
   of '(':
-    Inc(L.bufpos)
+    inc(L.bufpos)
     if (L.buf[L.bufPos] == '*'):
       if (L.buf[L.bufPos + 1] == '$'):
-        Inc(L.bufpos, 2)
+        inc(L.bufpos, 2)
         skip(L, tok)
         getSymbol(L, tok)
         tok.xkind = pxStarDirLe
@@ -481,12 +481,12 @@
       tok.xkind = pxStar
   of ')':
     tok.xkind = pxParRi
-    Inc(L.bufpos)
+    inc(L.bufpos)
   of '[':
-    Inc(L.bufpos)
+    inc(L.bufpos)
     tok.xkind = pxBracketLe
   of ']':
-    Inc(L.bufpos)
+    inc(L.bufpos)
     tok.xkind = pxBracketRi
   of '.':
     inc(L.bufpos)
@@ -496,21 +496,21 @@
     else:
       tok.xkind = pxDot
   of '{':
-    Inc(L.bufpos)
+    inc(L.bufpos)
     case L.buf[L.bufpos]
     of '$':
-      Inc(L.bufpos)
+      inc(L.bufpos)
       skip(L, tok)
       getSymbol(L, tok)
       tok.xkind = pxCurlyDirLe
     of '&':
-      Inc(L.bufpos)
+      inc(L.bufpos)
       tok.xkind = pxAmp
     of '%':
-      Inc(L.bufpos)
+      inc(L.bufpos)
       tok.xkind = pxPer
     of '@':
-      Inc(L.bufpos)
+      inc(L.bufpos)
       tok.xkind = pxCommand
     else: scanCurlyComment(L, tok)
   of '+':
@@ -554,7 +554,7 @@
       inc(L.bufpos)
   of '}':
     tok.xkind = pxCurlyDirRi
-    Inc(L.bufpos)
+    inc(L.bufpos)
   of '\'', '#':
     getString(L, tok)
   of '$':
@@ -567,4 +567,4 @@
     tok.literal = c & ""
     tok.xkind = pxInvalid
     lexMessage(L, errInvalidToken, c & " (\\" & $(ord(c)) & ')')
-    Inc(L.bufpos)
+    inc(L.bufpos)
diff --git a/compiler/pas2nim/pasparse.nim b/compiler/pas2nim/pasparse.nim
index 61d57dec3..928896338 100644
--- a/compiler/pas2nim/pasparse.nim
+++ b/compiler/pas2nim/pasparse.nim
@@ -57,7 +57,7 @@ const
     ["tbinaryfile", "tfile"], ["strstart", "0"], ["nl", "\"\\n\""],
    ["tostring", "$"]]

-proc ParseUnit*(p: var TParser): PNode
+proc parseUnit*(p: var TParser): PNode
 proc openParser*(p: var TParser, filename: string,
                  inputStream: PLLStream, flags: set[TParserFlag] = {})
 proc closeParser*(p: var TParser)
@@ -67,20 +67,20 @@ proc fixRecordDef*(n: var PNode)

 # implementation

-proc OpenParser(p: var TParser, filename: string,
+proc openParser(p: var TParser, filename: string,
                 inputStream: PLLStream, flags: set[TParserFlag] = {}) =
-  OpenLexer(p.lex, filename, inputStream)
+  openLexer(p.lex, filename, inputStream)
   initIdTable(p.repl)
   for i in countup(low(stdReplacements), high(stdReplacements)):
-    IdTablePut(p.repl, getIdent(stdReplacements[i][0]),
+    idTablePut(p.repl, getIdent(stdReplacements[i][0]),
               getIdent(stdReplacements[i][1]))
   if pfMoreReplacements in flags:
     for i in countup(low(nimReplacements), high(nimReplacements)):
-      IdTablePut(p.repl, getIdent(nimReplacements[i][0]),
+      idTablePut(p.repl, getIdent(nimReplacements[i][0]),
                 getIdent(nimReplacements[i][1]))
   p.flags = flags

-proc CloseParser(p: var TParser) = CloseLexer(p.lex)
+proc closeParser(p: var TParser) = closeLexer(p.lex)
 proc getTok(p: var TParser) = getTok(p.lex, p.tok)

 proc parMessage(p: TParser, msg: TMsgKind, arg = "") =
@@ -98,15 +98,15 @@ proc skipCom(p: var TParser, n: PNode) =
       parMessage(p, warnCommentXIgnored, p.tok.literal)
     getTok(p)

-proc ExpectIdent(p: TParser) =
+proc expectIdent(p: TParser) =
   if p.tok.xkind != pxSymbol:
     lexMessage(p.lex, errIdentifierExpected, $(p.tok))

-proc Eat(p: var TParser, xkind: TTokKind) =
+proc eat(p: var TParser, xkind: TTokKind) =
   if p.tok.xkind == xkind: getTok(p)
-  else: lexMessage(p.lex, errTokenExpected, TokKindToStr(xkind))
+  else: lexMessage(p.lex, errTokenExpected, tokKindToStr(xkind))

-proc Opt(p: var TParser, xkind: TTokKind) =
+proc opt(p: var TParser, xkind: TTokKind) =
   if p.tok.xkind == xkind: getTok(p)

 proc newNodeP(kind: TNodeKind, p: TParser): PNode =
@@ -131,7 +131,7 @@ proc newIdentNodeP(ident: PIdent, p: TParser): PNode =

 proc createIdentNodeP(ident: PIdent, p: TParser): PNode =
   result = newNodeP(nkIdent, p)
-  var x = PIdent(IdTableGet(p.repl, ident))
+  var x = PIdent(idTableGet(p.repl, ident))
   if x != nil: result.ident = x
   else: result.ident = ident

@@ -170,7 +170,7 @@ proc parseCommand(p: var TParser, definition: PNode = nil): PNode =
     getTok(p)
     eat(p, pxCurlyDirRi)
     result = parseExpr(p)
-    if result.kind == nkEmpty: InternalError("emptyNode modified")
+    if result.kind == nkEmpty: internalError("emptyNode modified")
     result.kind = nkCurly
   elif p.tok.ident.id == getIdent("cast").id:
     getTok(p)
@@ -251,7 +251,7 @@ proc bracketExprList(p: var TParser, first: PNode): PNode =
       getTok(p)
       break
     if p.tok.xkind == pxEof:
-      parMessage(p, errTokenExpected, TokKindToStr(pxBracketRi))
+      parMessage(p, errTokenExpected, tokKindToStr(pxBracketRi))
       break
     var a = rangeExpr(p)
     skipCom(p, a)
@@ -281,7 +281,7 @@ proc exprListAux(p: var TParser, elemKind: TNodeKind,
       getTok(p)
       break
     if p.tok.xkind == pxEof:
-      parMessage(p, errTokenExpected, TokKindToStr(endtok))
+      parMessage(p, errTokenExpected, tokKindToStr(endTok))
       break
     var a = exprColonEqExpr(p, elemKind, sepTok)
     skipCom(p, a)
@@ -319,7 +319,7 @@ proc qualifiedIdentListAux(p: var TParser, endTok: TTokKind,
       getTok(p)
       break
     if p.tok.xkind == pxEof:
-      parMessage(p, errTokenExpected, TokKindToStr(endtok))
+      parMessage(p, errTokenExpected, tokKindToStr(endTok))
       break
     var a = qualifiedIdent(p)
     skipCom(p, a)
@@ -584,7 +584,7 @@ proc parseIncludeDir(p: var TParser): PNode =

 proc definedExprAux(p: var TParser): PNode =
   result = newNodeP(nkCall, p)
   addSon(result, newIdentNodeP(getIdent("defined"), p))
-  ExpectIdent(p)
+  expectIdent(p)
   addSon(result, createIdentNodeP(p.tok.ident, p))
   getTok(p)
@@ -753,7 +753,7 @@ proc parseCase(p: var TParser): PNode =
       while (p.tok.xkind != pxEof) and (p.tok.xkind != pxColon):
         addSon(b, rangeExpr(p))
         opt(p, pxComma)
-        skipcom(p, b)
+        skipCom(p, b)
       eat(p, pxColon)
       skipCom(p, b)
       addSon(b, parseStmt(p))
@@ -818,7 +818,7 @@ proc parseFor(p: var TParser): PNode =
     getTok(p)
     b = parseExpr(p)
   else:
-    parMessage(p, errTokenExpected, TokKindToStr(pxTo))
+    parMessage(p, errTokenExpected, tokKindToStr(pxTo))
   addSon(c, a)
   addSon(c, b)
   eat(p, pxDo)
@@ -915,7 +915,7 @@ proc parseCallingConvention(p: var TParser): PNode =
       getTok(p)
       opt(p, pxSemicolon)
     else:
-      nil
+      discard

 proc parseRoutineSpecifiers(p: var TParser, noBody: var bool): PNode =
   var e: PNode
@@ -1096,7 +1096,7 @@ proc parseRecordCase(p: var TParser): PNode =
     while (p.tok.xkind != pxEof) and (p.tok.xkind != pxColon):
       addSon(b, rangeExpr(p))
       opt(p, pxComma)
-      skipcom(p, b)
+      skipCom(p, b)
     eat(p, pxColon)
     skipCom(p, b)
     c = newNodeP(nkRecList, p)
@@ -1168,7 +1168,7 @@ proc addPragmaToIdent(ident: var PNode, pragma: PNode) =
   else:
     pragmasNode = ident.sons[1]
     if pragmasNode.kind != nkPragma:
-      InternalError(ident.info, "addPragmaToIdent")
+      internalError(ident.info, "addPragmaToIdent")
   addSon(pragmasNode, pragma)

 proc parseRecordBody(p: var TParser, result, definition: PNode) =
@@ -1183,13 +1183,13 @@
       if definition != nil:
         addPragmaToIdent(definition.sons[0], newIdentNodeP(p.tok.ident, p))
       else:
-        InternalError(result.info, "anonymous record is not supported")
+        internalError(result.info, "anonymous record is not supported")
       getTok(p)
     else:
-      InternalError(result.info, "parseRecordBody")
+      internalError(result.info, "parseRecordBody")
   of pxCommand:
     if definition != nil:
       addPragmaToIdent(definition.sons[0], parseCommand(p))
-    else: InternalError(result.info, "anonymous record is not supported")
+    else: internalError(result.info, "anonymous record is not supported")
   else: nil
   opt(p, pxSemicolon)
@@ -1223,7 +1223,7 @@ proc parseTypeDesc(p: var TParser, definition: PNode = nil): PNode =
     getTok(p)
     if p.tok.xkind == pxCommand:
       result = parseCommand(p)
-      if result.kind != nkTupleTy: InternalError(result.info, "parseTypeDesc")
+      if result.kind != nkTupleTy: internalError(result.info, "parseTypeDesc")
       parseRecordBody(p, result, definition)
       var a = lastSon(result) # embed nkRecList directly into nkTupleTy
       for i in countup(0, sonsLen(a) - 1):
@@ -1237,7 +1237,7 @@
       if definition != nil:
         addPragmaToIdent(definition.sons[0], newIdentNodeP(getIdent("final"), p))
       else:
-        InternalError(result.info, "anonymous record is not supported")
+        internalError(result.info, "anonymous record is not supported")
   of pxObject: result = parseRecordOrObject(p, nkObjectTy, definition)
   of pxParLe: result = parseEnum(p)
   of pxArray:
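
One hunk is not a pure rename: the empty `else` branch in `parseCallingConvention` changes from `nil` to `discard`. A small sketch of that idiom, assuming current Nim, where `discard` is the explicit "do nothing" statement and the old bare `nil` statement is no longer used (the `classify` proc is hypothetical):

```nim
# `discard` marks an intentionally empty branch; old Nimrod code wrote `nil`.
proc classify(c: char) =
  case c
  of 'a'..'z', 'A'..'Z': echo "letter"
  of '0'..'9': echo "digit"
  else: discard   # nothing to do for other characters

classify('x')
```
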