path: root/compiler/aliases.nim
blob: 7accb8ce30a83511720eea7646e60f09703fbf47
#
#
#           The Nimrod Compiler
#        (c) Copyright 2012 Andreas Rumpf
#
#    See the file "copying.txt", included in this
#    distribution, for details about the copyright.
#

## Simple alias analysis for the HLO and the code generators.
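##
## A caller would typically treat any answer other than `arNo` as a possible
## alias, along the lines of (hypothetical snippet, not from this module):
## `if isPartOf(le, ri) != arNo: copyViaTemporary()`.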

import
  ast, astalgo, types, trees, intsets, msgs
  
type
  TAnalysisResult* = enum
    arNo, arMaybe, arYes
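    # arNo: the locations are provably disjoint; arYes: they are taken to
    # overlap; arMaybe: nothing could be proven, treat as a possible overlap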

proc isPartOfAux(a, b: PType, marker: var TIntSet): TAnalysisResult

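# Walks a record AST (field lists, including case-object branches) and checks
# whether any field's type can contain a value of type `b`.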
proc isPartOfAux(n: PNode, b: PType, marker: var TIntSet): TAnalysisResult =
  result = arNo
  case n.kind
  of nkRecList: 
    for i in countup(0, sonsLen(n) - 1): 
      result = isPartOfAux(n.sons[i], b, marker)
      if result == arYes: return
  of nkRecCase:
    assert(n.sons[0].kind == nkSym)
    result = isPartOfAux(n.sons[0], b, marker)
    if result == arYes: return
    for i in countup(1, sonsLen(n) - 1): 
      case n.sons[i].kind
      of nkOfBranch, nkElse: 
        result = isPartOfAux(lastSon(n.sons[i]), b, marker)
        if result == arYes: return
      else: internalError("isPartOfAux(record case branch)")
  of nkSym:
    result = isPartOfAux(n.sym.typ, b, marker)
  else: internalError(n.info, "isPartOfAux()")
  
proc isPartOfAux(a, b: PType, marker: var TIntSet): TAnalysisResult = 
  result = arNo
  if a == nil or b == nil: return 
  if ContainsOrIncl(marker, a.id): return 
  if compareTypes(a, b, dcEqIgnoreDistinct): return arYes
  case a.kind
  of tyObject: 
    result = isPartOfAux(a.sons[0], b, marker)
    if result == arNo: result = isPartOfAux(a.n, b, marker)
  of tyGenericInst, tyDistinct:
    result = isPartOfAux(lastSon(a), b, marker)
  of tyArray, tyArrayConstr, tySet, tyTuple: 
    for i in countup(0, sonsLen(a) - 1): 
      result = isPartOfAux(a.sons[i], b, marker)
      if result == arYes: return 
  else: nil

proc isPartOf(a, b: PType): TAnalysisResult = 
  ## Checks whether 'a' can be part of 'b'. Iterates over VALUE types!
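  ## For example (illustration): if `U` is an object type with a field of
  ## type `T`, then isPartOf(T, U) yields arYes, while isPartOf(U, T) yields
  ## arNo (unless `T` in turn embeds a `U`).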
  var marker = InitIntSet()
  # watch out: parameters reversed because I'm too lazy to change the code...
  result = isPartOfAux(b, a, marker)

proc isPartOf*(a, b: PNode): TAnalysisResult =
  ## Checks whether location `a` can be part of location `b`. We treat seqs
  ## and strings as pointers because the code gen often just passes them as
  ## such.
  ##
  ## Note: `a` can only be part of `b` if `a`'s type can be part of `b`'s
  ## type. Since type analysis is more expensive, however, we perform it only
  ## when necessary. (A small standalone sketch of these rules can be found
  ## at the end of this file.)
  ##
  ## YES-cases:
  ##  x    <| x   # for general trees
  ##  x[]  <| x
  ##  x[i] <| x
  ##  x.f  <| x
  ##
  ## NO-cases:
  ##  x           !<| y    # depending on type and symbol kind
  ##  x[constA]   !<| x[constB]
  ##  x.f         !<| x.g
  ##  x.f         !<| y.f  iff x !<| y
  ##
  ## MAYBE-cases:
  ##  x[]  ?<| y[]   iff compatible type
  ##  x[]  ?<| y     depending on type
  if a.kind == b.kind:
    case a.kind
    of nkSym:
      const varKinds = {skVar, skTemp, skProc}
      # same symbol: aliasing:
      if a.sym.id == b.sym.id: result = arYes
      elif a.sym.kind in varKinds or b.sym.kind in varKinds: 
        # actually, a param could alias a var but we know that cannot happen
        # here. XXX make this more generic
        result = arNo
      else:
        # use expensive type check:
        if isPartOf(a.sym.typ, b.sym.typ) != arNo:
          result = arMaybe
    of nkBracketExpr:
      result = isPartOf(a[0], b[0])
      if len(a) >= 2 and len(b) >= 2:
        # array accesses:
        if result == arYes and isDeepConstExpr(a[1]) and isDeepConstExpr(b[1]):
          # we know it's the same array and we have 2 constant indexes;
          # if they are equal, the locations overlap, otherwise they are
          # distinct array cells:
          var x = if a[1].kind == nkHiddenStdConv: a[1][1] else: a[1]
          var y = if b[1].kind == nkHiddenStdConv: b[1][1] else: b[1]
          
          if SameValue(x, y): result = arYes
          else: result = arNo
        # else: maybe and no are accurate
      else:
        # pointer derefs:
        if result != arYes:
          if isPartOf(a.typ, b.typ) != arNo: result = arMaybe
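      # e.g. `x[0]` vs `x[1]` yields arNo and `x[0]` vs `x[0]` yields arYes;
      # with non-constant indexes (`x[i]` vs `x[j]`) the answer derived from
      # the common array is kept unchanged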
      
    of nkDotExpr:
      result = isPartOf(a[0], b[0])
      if result != arNo:
        # if the fields are different, it's not the same location
        if a[1].sym.id != b[1].sym.id:
          result = arNo

    of nkHiddenDeref, nkDerefExpr:
      result = isPartOf(a[0], b[0])
      # weaken because of indirection:
      if result != arYes:
        if isPartOf(a.typ, b.typ) != arNo: result = arMaybe
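      # e.g. `p[]` vs `q[]`: two distinct pointer variables may still point
      # to the same cell, so with compatible types the best answer is arMaybe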
      
    of nkHiddenStdConv, nkHiddenSubConv, nkConv:
      result = isPartOf(a[1], b[1])
    of nkObjUpConv, nkObjDownConv, nkCheckedFieldExpr:
      result = isPartOf(a[0], b[0])
    else: nil
    # Calls return a new location, so a default of ``arNo`` is fine.
  else:
    # go down recursively; this is quite demanding:
    const 
      Ix0Kinds = {nkDotExpr, nkBracketExpr, nkObjUpConv, nkObjDownConv,
                  nkCheckedFieldExpr}
      Ix1Kinds = {nkHiddenStdConv, nkHiddenSubConv, nkConv}
      DerefKinds = {nkHiddenDeref, nkDerefExpr}
    case b.kind
    of Ix0Kinds:
      # a* !<| b.f  iff  a* !<| b
      result = isPartOf(a, b[0])
    
    of DerefKinds:
      # a* !<| b[]  iff  a's type cannot occur behind b at all; otherwise
      # check b's base and weaken a definite "no" to "maybe" because of the
      # indirection
      if isPartOf(a.typ, b.typ) != arNo:
        result = isPartOf(a, b[0])
        if result == arNo: result = arMaybe
    
    of Ix1Kinds:
      # a* !<| T(b)  iff a* !<| b
      result = isPartOf(a, b[1])
    
    of nkSym:
      # b is an atom, so we have to check a:
      case a.kind
      of Ix0Kinds:
        # a.f !<| b  iff  a !<| b
        result = isPartOf(a[0], b)
      of Ix1Kinds:
        result = isPartOf(a[1], b)
      
      of DerefKinds:
        if isPartOf(a.typ, b.typ) != arNo:
          result = isPartOf(a[0], b)
          if result == arNo: result = arMaybe
      else: nil
    else: nil
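
# ---------------------------------------------------------------------------
# Illustration only, not used by the compiler: the `ToyLoc` model below is
# hypothetical (all names are made up) and merely mirrors the YES/NO/MAYBE
# table documented on `isPartOf` above, so the intended answers can be
# checked without building real PNode trees. It is written in present-day
# Nim syntax and does not reproduce the compiler's exact behaviour.

when isMainModule:
  type
    ToyStepKind = enum stField, stConstIdx, stUnknownIdx, stDeref
    ToyStep = object
      kind: ToyStepKind
      field: string       # field name, for stField
      idx: int            # constant index, for stConstIdx
    ToyLoc = object
      root: string        # name of the root variable
      steps: seq[ToyStep] # access path applied to the root, in order

  proc hasDeref(a: ToyLoc): bool =
    for s in a.steps:
      if s.kind == stDeref: return true

  proc overlaps(a, b: ToyLoc): TAnalysisResult =
    if a.root != b.root:
      # distinct local variables cannot alias unless a pointer is followed
      # somewhere; the real analysis then consults the type-based check, the
      # toy simply answers arMaybe
      if hasDeref(a) or hasDeref(b): return arMaybe
      return arNo
    for i in 0 ..< min(a.steps.len, b.steps.len):
      let x = a.steps[i]
      let y = b.steps[i]
      if x.kind == stDeref and y.kind == stDeref:
        discard                             # same pointer, same target
      elif x.kind == stField and y.kind == stField:
        if x.field != y.field: return arNo  # x.f !<| x.g
      elif x.kind == stConstIdx and y.kind == stConstIdx:
        if x.idx != y.idx: return arNo      # x[constA] !<| x[constB]
      else:
        return arMaybe                      # unknown index or mixed steps
    result = arYes                          # one path is a prefix: x.f <| x

  proc fld(name: string): ToyStep = ToyStep(kind: stField, field: name)
  proc constIdx(i: int): ToyStep = ToyStep(kind: stConstIdx, idx: i)
  proc deref(): ToyStep = ToyStep(kind: stDeref)

  let x = ToyLoc(root: "x", steps: @[])
  let xf = ToyLoc(root: "x", steps: @[fld("f")])
  let xg = ToyLoc(root: "x", steps: @[fld("g")])
  assert overlaps(xf, x) == arYes                           # x.f  <| x
  assert overlaps(xf, xg) == arNo                           # x.f !<| x.g
  assert overlaps(x, ToyLoc(root: "y", steps: @[])) == arNo
  assert overlaps(ToyLoc(root: "x", steps: @[constIdx(0)]),
                  ToyLoc(root: "x", steps: @[constIdx(1)])) == arNo
  assert overlaps(ToyLoc(root: "p", steps: @[deref()]),
                  ToyLoc(root: "q", steps: @[deref()])) == arMaybe  # x[] ?<| y[]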
class="sx">bytesRead != chunkSize: let ret = recv(s, addr(result[ri]), chunkSize-bytesRead, timeout) ri += ret bytesRead += ret s.skip(2, timeout) # Skip \c\L # Trailer headers will only be sent if the request specifies that we want # them: http://tools.ietf.org/html/rfc2616#section-3.6.1 proc parseBody(s: Socket, headers: HttpHeaders, httpVersion: string, timeout: int): string = result = "" if headers.getOrDefault"Transfer-Encoding" == "chunked": result = parseChunks(s, timeout) else: # -REGION- Content-Length # (http://tools.ietf.org/html/rfc2616#section-4.4) NR.3 var contentLengthHeader = headers.getOrDefault"Content-Length" if contentLengthHeader != "": var length = contentLengthHeader.parseint() if length > 0: result = newString(length) var received = 0 while true: if received >= length: break let r = s.recv(addr(result[received]), length-received, timeout) if r == 0: break received += r if received != length: httpError("Got invalid content length. Expected: " & $length & " got: " & $received) else: # (http://tools.ietf.org/html/rfc2616#section-4.4) NR.4 TODO # -REGION- Connection: Close # (http://tools.ietf.org/html/rfc2616#section-4.4) NR.5 if headers.getOrDefault"Connection" == "close" or httpVersion == "1.0": var buf = "" while true: buf = newString(4000) let r = s.recv(addr(buf[0]), 4000, timeout) if r == 0: break buf.setLen(r) result.add(buf) proc parseResponse(s: Socket, getBody: bool, timeout: int): Response = var parsedStatus = false var linei = 0 var fullyRead = false var line = "" result.headers = newHttpHeaders() while true: line = "" linei = 0 s.readLine(line, timeout) if line == "": break # We've been disconnected. if line == "\c\L": fullyRead = true break if not parsedStatus: # Parse HTTP version info and status code. var le = skipIgnoreCase(line, "HTTP/", linei) if le <= 0: httpError("invalid http version") inc(linei, le) le = skipIgnoreCase(line, "1.1", linei) if le > 0: result.version = "1.1" else: le = skipIgnoreCase(line, "1.0", linei) if le <= 0: httpError("unsupported http version") result.version = "1.0" inc(linei, le) # Status code linei.inc skipWhitespace(line, linei) result.status = line[linei .. ^1] parsedStatus = true else: # Parse headers var name = "" var le = parseUntil(line, name, ':', linei) if le <= 0: httpError("invalid headers") inc(linei, le) if line[linei] != ':': httpError("invalid headers") inc(linei) # Skip : result.headers[name] = line[linei.. ^1].strip() # Ensure the server isn't trying to DoS us. if result.headers.len > headerLimit: httpError("too many headers") if not fullyRead: httpError("Connection was closed before full request has been made") if getBody: result.body = parseBody(s, result.headers, result.version, timeout) else: result.body = "" type HttpMethod* = enum ## the requested HttpMethod httpHEAD, ## Asks for the response identical to the one that would ## correspond to a GET request, but without the response ## body. httpGET, ## Retrieves the specified resource. httpPOST, ## Submits data to be processed to the identified ## resource. The data is included in the body of the ## request. httpPUT, ## Uploads a representation of the specified resource. httpDELETE, ## Deletes the specified resource. httpTRACE, ## Echoes back the received request, so that a client ## can see what intermediate servers are adding or ## changing in the request. httpOPTIONS, ## Returns the HTTP methods that the server supports ## for specified address. 
httpCONNECT ## Converts the request connection to a transparent ## TCP/IP tunnel, usually used for proxies. {.deprecated: [THttpMethod: HttpMethod].} when not defined(ssl): type SSLContext = ref object let defaultSSLContext: SSLContext = nil else: let defaultSSLContext = newContext(verifyMode = CVerifyNone) proc newProxy*(url: string, auth = ""): Proxy = ## Constructs a new ``TProxy`` object. result = Proxy(url: parseUri(url), auth: auth) proc newMultipartData*: MultipartData = ## Constructs a new ``MultipartData`` object. MultipartData(content: @[]) proc add*(p: var MultipartData, name, content: string, filename: string = nil, contentType: string = nil) = ## Add a value to the multipart data. Raises a `ValueError` exception if ## `name`, `filename` or `contentType` contain newline characters. if {'\c','\L'} in name: raise newException(ValueError, "name contains a newline character") if filename != nil and {'\c','\L'} in filename: raise newException(ValueError, "filename contains a newline character") if contentType != nil and {'\c','\L'} in contentType: raise newException(ValueError, "contentType contains a newline character") var str = "Content-Disposition: form-data; name=\"" & name & "\"" if filename != nil: str.add("; filename=\"" & filename & "\"") str.add("\c\L") if contentType != nil: str.add("Content-Type: " & contentType & "\c\L") str.add("\c\L" & content & "\c\L") p.content.add(str) proc add*(p: var MultipartData, xs: MultipartEntries): MultipartData {.discardable.} = ## Add a list of multipart entries to the multipart data `p`. All values are ## added without a filename and without a content type. ## ## .. code-block:: Nim ## data.add({"action": "login", "format": "json"}) for name, content in xs.items: p.add(name, content) result = p proc newMultipartData*(xs: MultipartEntries): MultipartData = ## Create a new multipart data object and fill it with the entries `xs` ## directly. ## ## .. code-block:: Nim ## var data = newMultipartData({"action": "login", "format": "json"}) result = MultipartData(content: @[]) result.add(xs) proc addFiles*(p: var MultipartData, xs: openarray[tuple[name, file: string]]): MultipartData {.discardable.} = ## Add files to a multipart data object. The file will be opened from your ## disk, read and sent with the automatically determined MIME type. Raises an ## `IOError` if the file cannot be opened or reading fails. To manually ## specify file content, filename and MIME type, use `[]=` instead. ## ## .. code-block:: Nim ## data.addFiles({"uploaded_file": "public/test.html"}) var m = newMimetypes() for name, file in xs.items: var contentType: string let (_, fName, ext) = splitFile(file) if ext.len > 0: contentType = m.getMimetype(ext[1..ext.high], nil) p.add(name, readFile(file), fName & ext, contentType) result = p proc `[]=`*(p: var MultipartData, name, content: string) = ## Add a multipart entry to the multipart data `p`. The value is added ## without a filename and without a content type. ## ## .. code-block:: Nim ## data["username"] = "NimUser" p.add(name, content) proc `[]=`*(p: var MultipartData, name: string, file: tuple[name, contentType, content: string]) = ## Add a file to the multipart data `p`, specifying filename, contentType and ## content manually. ## ## .. 
code-block:: Nim ## data["uploaded_file"] = ("test.html", "text/html", ## "<html><head></head><body><p>test</p></body></html>") p.add(name, file.content, file.name, file.contentType) proc format(p: MultipartData): tuple[header, body: string] = if p == nil or p.content == nil or p.content.len == 0: return ("", "") # Create boundary that is not in the data to be formatted var bound: string while true: bound = $random(int.high) var found = false for s in p.content: if bound in s: found = true if not found: break result.header = "Content-Type: multipart/form-data; boundary=" & bound & "\c\L" result.body = "" for s in p.content: result.body.add("--" & bound & "\c\L" & s) result.body.add("--" & bound & "--\c\L") proc request*(url: string, httpMethod: string, extraHeaders = "", body = "", sslContext = defaultSSLContext, timeout = -1, userAgent = defUserAgent, proxy: Proxy = nil): Response = ## | Requests ``url`` with the custom method string specified by the ## | ``httpMethod`` parameter. ## | Extra headers can be specified and must be separated by ``\c\L`` ## | An optional timeout can be specified in milliseconds, if reading from the ## server takes longer than specified an ETimeout exception will be raised. var r = if proxy == nil: parseUri(url) else: proxy.url var hostUrl = if proxy == nil: r else: parseUri(url) var headers = substr(httpMethod, len("http")) # TODO: Use generateHeaders further down once it supports proxies. if proxy == nil: headers.add ' ' if r.path[0] != '/': headers.add '/' headers.add(r.path) if r.query.len > 0: headers.add("?" & r.query) else: headers.add(" " & url) headers.add(" HTTP/1.1\c\L") if hostUrl.port == "": add(headers, "Host: " & hostUrl.hostname & "\c\L") else: add(headers, "Host: " & hostUrl.hostname & ":" & hostUrl.port & "\c\L") if userAgent != "": add(headers, "User-Agent: " & userAgent & "\c\L") if proxy != nil and proxy.auth != "": let auth = base64.encode(proxy.auth, newline = "") add(headers, "Proxy-Authorization: basic " & auth & "\c\L") add(headers, extraHeaders) add(headers, "\c\L") var s = newSocket() if s == nil: raiseOSError(osLastError()) var port = net.Port(80) if r.scheme == "https": when defined(ssl): sslContext.wrapSocket(s) port = net.Port(443) else: raise newException(HttpRequestError, "SSL support is not available. Cannot connect over SSL.") if r.port != "": port = net.Port(r.port.parseInt) if timeout == -1: s.connect(r.hostname, port) else: s.connect(r.hostname, port, timeout) s.send(headers) if body != "": s.send(body) result = parseResponse(s, httpMethod != "httpHEAD", timeout) s.close() proc request*(url: string, httpMethod = httpGET, extraHeaders = "", body = "", sslContext = defaultSSLContext, timeout = -1, userAgent = defUserAgent, proxy: Proxy = nil): Response = ## | Requests ``url`` with the specified ``httpMethod``. ## | Extra headers can be specified and must be separated by ``\c\L`` ## | An optional timeout can be specified in milliseconds, if reading from the ## server takes longer than specified an ETimeout exception will be raised. result = request(url, $httpMethod, extraHeaders, body, sslContext, timeout, userAgent, proxy) proc redirection(status: string): bool = const redirectionNRs = ["301", "302", "303", "307"] for i in items(redirectionNRs): if status.startsWith(i): return true proc getNewLocation(lastURL: string, headers: HttpHeaders): string = result = headers.getOrDefault"Location" if result == "": httpError("location header expected") # Relative URLs. (Not part of the spec, but soon will be.) 
let r = parseUri(result) if r.hostname == "" and r.path != "": var parsed = parseUri(lastURL) parsed.path = r.path result = $parsed proc get*(url: string, extraHeaders = "", maxRedirects = 5, sslContext: SSLContext = defaultSSLContext, timeout = -1, userAgent = defUserAgent, proxy: Proxy = nil): Response = ## | GETs the ``url`` and returns a ``Response`` object ## | This proc also handles redirection ## | Extra headers can be specified and must be separated by ``\c\L``. ## | An optional timeout can be specified in milliseconds, if reading from the ## server takes longer than specified an ETimeout exception will be raised. result = request(url, httpGET, extraHeaders, "", sslContext, timeout, userAgent, proxy) var lastURL = url for i in 1..maxRedirects: if result.status.redirection(): let redirectTo = getNewLocation(lastURL, result.headers) result = request(redirectTo, httpGET, extraHeaders, "", sslContext, timeout, userAgent, proxy) lastURL = redirectTo proc getContent*(url: string, extraHeaders = "", maxRedirects = 5, sslContext: SSLContext = defaultSSLContext, timeout = -1, userAgent = defUserAgent, proxy: Proxy = nil): string = ## | GETs the body and returns it as a string. ## | Raises exceptions for the status codes ``4xx`` and ``5xx`` ## | Extra headers can be specified and must be separated by ``\c\L``. ## | An optional timeout can be specified in milliseconds, if reading from the ## server takes longer than specified an ETimeout exception will be raised. var r = get(url, extraHeaders, maxRedirects, sslContext, timeout, userAgent, proxy) if r.status[0] in {'4','5'}: raise newException(HttpRequestError, r.status) else: return r.body proc post*(url: string, extraHeaders = "", body = "", maxRedirects = 5, sslContext: SSLContext = defaultSSLContext, timeout = -1, userAgent = defUserAgent, proxy: Proxy = nil, multipart: MultipartData = nil): Response = ## | POSTs ``body`` to the ``url`` and returns a ``Response`` object. ## | This proc adds the necessary Content-Length header. ## | This proc also handles redirection. ## | Extra headers can be specified and must be separated by ``\c\L``. ## | An optional timeout can be specified in milliseconds, if reading from the ## server takes longer than specified an ETimeout exception will be raised. ## | The optional ``multipart`` parameter can be used to create ## ``multipart/form-data`` POSTs comfortably. let (mpHeaders, mpBody) = format(multipart) template withNewLine(x): expr = if x.len > 0 and not x.endsWith("\c\L"): x & "\c\L" else: x var xb = mpBody.withNewLine() & body var xh = extraHeaders.withNewLine() & mpHeaders.withNewLine() & withNewLine("Content-Length: " & $len(xb)) result = request(url, httpPOST, xh, xb, sslContext, timeout, userAgent, proxy) var lastURL = url for i in 1..maxRedirects: if result.status.redirection(): let redirectTo = getNewLocation(lastURL, result.headers) var meth = if result.status != "307": httpGet else: httpPost result = request(redirectTo, meth, xh, xb, sslContext, timeout, userAgent, proxy) lastURL = redirectTo proc postContent*(url: string, extraHeaders = "", body = "", maxRedirects = 5, sslContext: SSLContext = defaultSSLContext, timeout = -1, userAgent = defUserAgent, proxy: Proxy = nil, multipart: MultipartData = nil): string = ## | POSTs ``body`` to ``url`` and returns the response's body as a string ## | Raises exceptions for the status codes ``4xx`` and ``5xx`` ## | Extra headers can be specified and must be separated by ``\c\L``. 
## | An optional timeout can be specified in milliseconds, if reading from the ## server takes longer than specified an ETimeout exception will be raised. ## | The optional ``multipart`` parameter can be used to create ## ``multipart/form-data`` POSTs comfortably. var r = post(url, extraHeaders, body, maxRedirects, sslContext, timeout, userAgent, proxy, multipart) if r.status[0] in {'4','5'}: raise newException(HttpRequestError, r.status) else: return r.body proc downloadFile*(url: string, outputFilename: string, sslContext: SSLContext = defaultSSLContext, timeout = -1, userAgent = defUserAgent, proxy: Proxy = nil) = ## | Downloads ``url`` and saves it to ``outputFilename`` ## | An optional timeout can be specified in milliseconds, if reading from the ## server takes longer than specified an ETimeout exception will be raised. var f: File if open(f, outputFilename, fmWrite): f.write(getContent(url, sslContext = sslContext, timeout = timeout, userAgent = userAgent, proxy = proxy)) f.close() else: fileError("Unable to open file") proc generateHeaders(r: Uri, httpMethod: string, headers: StringTableRef, body: string): string = # TODO: Use this in the blocking HttpClient once it supports proxies. result = substr(httpMethod, len("http")) # TODO: Proxies result.add ' ' if r.path[0] != '/': result.add '/' result.add(r.path) if r.query.len > 0: result.add("?" & r.query) result.add(" HTTP/1.1\c\L") if r.port == "": add(result, "Host: " & r.hostname & "\c\L") else: add(result, "Host: " & r.hostname & ":" & r.port & "\c\L") add(result, "Connection: Keep-Alive\c\L") if body.len > 0 and not headers.hasKey("Content-Length"): add(result, "Content-Length: " & $body.len & "\c\L") for key, val in headers: add(result, key & ": " & val & "\c\L") add(result, "\c\L") type AsyncHttpClient* = ref object socket: AsyncSocket connected: bool currentURL: Uri ## Where we are currently connected. headers*: StringTableRef maxRedirects: int userAgent: string when defined(ssl): sslContext: net.SslContext {.deprecated: [PAsyncHttpClient: AsyncHttpClient].} proc newAsyncHttpClient*(userAgent = defUserAgent, maxRedirects = 5, sslContext = defaultSslContext): AsyncHttpClient = ## Creates a new AsyncHttpClient instance. ## ## ``userAgent`` specifies the user agent that will be used when making ## requests. ## ## ``maxRedirects`` specifies the maximum amount of redirects to follow, ## default is 5. ## ## ``sslContext`` specifies the SSL context to use for HTTPS requests. new result result.headers = newStringTable(modeCaseInsensitive) result.userAgent = userAgent result.maxRedirects = maxRedirects when defined(ssl): result.sslContext = sslContext proc close*(client: AsyncHttpClient) = ## Closes any connections held by the HTTP client. if client.connected: client.socket.close() client.connected = false proc recvFull(socket: AsyncSocket, size: int): Future[string] {.async.} = ## Ensures that all the data requested is read and returned. result = "" while true: if size == result.len: break let data = await socket.recv(size - result.len) if data == "": break # We've been disconnected. 
result.add data proc parseChunks(client: AsyncHttpClient): Future[string] {.async.} = result = "" while true: var chunkSize = 0 var chunkSizeStr = await client.socket.recvLine() var i = 0 if chunkSizeStr == "": httpError("Server terminated connection prematurely") while true: case chunkSizeStr[i] of '0'..'9': chunkSize = chunkSize shl 4 or (ord(chunkSizeStr[i]) - ord('0')) of 'a'..'f': chunkSize = chunkSize shl 4 or (ord(chunkSizeStr[i]) - ord('a') + 10) of 'A'..'F': chunkSize = chunkSize shl 4 or (ord(chunkSizeStr[i]) - ord('A') + 10) of '\0': break of ';': # http://tools.ietf.org/html/rfc2616#section-3.6.1 # We don't care about chunk-extensions. break else: httpError("Invalid chunk size: " & chunkSizeStr) inc(i) if chunkSize <= 0: discard await recvFull(client.socket, 2) # Skip \c\L break result.add await recvFull(client.socket, chunkSize) discard await recvFull(client.socket, 2) # Skip \c\L # Trailer headers will only be sent if the request specifies that we want # them: http://tools.ietf.org/html/rfc2616#section-3.6.1 proc parseBody(client: AsyncHttpClient, headers: HttpHeaders, httpVersion: string): Future[string] {.async.} = result = "" if headers.getOrDefault"Transfer-Encoding" == "chunked": result = await parseChunks(client) else: # -REGION- Content-Length # (http://tools.ietf.org/html/rfc2616#section-4.4) NR.3 var contentLengthHeader = headers.getOrDefault"Content-Length" if contentLengthHeader != "": var length = contentLengthHeader.parseint() if length > 0: result = await client.socket.recvFull(length) if result == "": httpError("Got disconnected while trying to read body.") if result.len != length: httpError("Received length doesn't match expected length. Wanted " & $length & " got " & $result.len) else: # (http://tools.ietf.org/html/rfc2616#section-4.4) NR.4 TODO # -REGION- Connection: Close # (http://tools.ietf.org/html/rfc2616#section-4.4) NR.5 if headers.getOrDefault"Connection" == "close" or httpVersion == "1.0": var buf = "" while true: buf = await client.socket.recvFull(4000) if buf == "": break result.add(buf) proc parseResponse(client: AsyncHttpClient, getBody: bool): Future[Response] {.async.} = var parsedStatus = false var linei = 0 var fullyRead = false var line = "" result.headers = newHttpHeaders() while true: linei = 0 line = await client.socket.recvLine() if line == "": break # We've been disconnected. if line == "\c\L": fullyRead = true break if not parsedStatus: # Parse HTTP version info and status code. var le = skipIgnoreCase(line, "HTTP/", linei) if le <= 0: httpError("invalid http version, " & line.repr) inc(linei, le) le = skipIgnoreCase(line, "1.1", linei) if le > 0: result.version = "1.1" else: le = skipIgnoreCase(line, "1.0", linei) if le <= 0: httpError("unsupported http version") result.version = "1.0" inc(linei, le) # Status code linei.inc skipWhitespace(line, linei) result.status = line[linei .. ^1] parsedStatus = true else: # Parse headers var name = "" var le = parseUntil(line, name, ':', linei) if le <= 0: httpError("invalid headers") inc(linei, le) if line[linei] != ':': httpError("invalid headers") inc(linei) # Skip : result.headers[name] = line[linei.. 
^1].strip() if result.headers.len > headerLimit: httpError("too many headers") if not fullyRead: httpError("Connection was closed before full request has been made") if getBody: result.body = await parseBody(client, result.headers, result.version) else: result.body = "" proc newConnection(client: AsyncHttpClient, url: Uri) {.async.} = if client.currentURL.hostname != url.hostname or client.currentURL.scheme != url.scheme: if client.connected: client.close() client.socket = newAsyncSocket() # TODO: I should be able to write 'net.Port' here... let port = if url.port == "": if url.scheme.toLower() == "https": nativesockets.Port(443) else: nativesockets.Port(80) else: nativesockets.Port(url.port.parseInt) if url.scheme.toLower() == "https": when defined(ssl): client.sslContext.wrapSocket(client.socket) else: raise newException(HttpRequestError, "SSL support is not available. Cannot connect over SSL.") await client.socket.connect(url.hostname, port) client.currentURL = url client.connected = true proc request*(client: AsyncHttpClient, url: string, httpMethod: string, body = ""): Future[Response] {.async.} = ## Connects to the hostname specified by the URL and performs a request ## using the custom method string specified by ``httpMethod``. ## ## Connection will kept alive. Further requests on the same ``client`` to ## the same hostname will not require a new connection to be made. The ## connection can be closed by using the ``close`` procedure. ## ## The returned future will complete once the request is completed. let r = parseUri(url) await newConnection(client, r) if not client.headers.hasKey("user-agent") and client.userAgent != "": client.headers["User-Agent"] = client.userAgent var headers = generateHeaders(r, $httpMethod, client.headers, body) await client.socket.send(headers) if body != "": await client.socket.send(body) result = await parseResponse(client, httpMethod != "httpHEAD") proc request*(client: AsyncHttpClient, url: string, httpMethod = httpGET, body = ""): Future[Response] = ## Connects to the hostname specified by the URL and performs a request ## using the method specified. ## ## Connection will kept alive. Further requests on the same ``client`` to ## the same hostname will not require a new connection to be made. The ## connection can be closed by using the ``close`` procedure. ## ## The returned future will complete once the request is completed. result = request(client, url, $httpMethod, body) proc get*(client: AsyncHttpClient, url: string): Future[Response] {.async.} = ## Connects to the hostname specified by the URL and performs a GET request. ## ## This procedure will follow redirects up to a maximum number of redirects ## specified in ``newAsyncHttpClient``. result = await client.request(url, httpGET) var lastURL = url for i in 1..client.maxRedirects: if result.status.redirection(): let redirectTo = getNewLocation(lastURL, result.headers) result = await client.request(redirectTo, httpGET) lastURL = redirectTo proc post*(client: AsyncHttpClient, url: string, body = "", multipart: MultipartData = nil): Future[Response] {.async.} = ## Connects to the hostname specified by the URL and performs a POST request. ## ## This procedure will follow redirects up to a maximum number of redirects ## specified in ``newAsyncHttpClient``. 
let (mpHeader, mpBody) = format(multipart) template withNewLine(x): expr = if x.len > 0 and not x.endsWith("\c\L"): x & "\c\L" else: x var xb = mpBody.withNewLine() & body if multipart != nil: client.headers["Content-Type"] = mpHeader.split(": ")[1] client.headers["Content-Length"] = $len(xb) result = await client.request(url, httpPOST, xb) when not defined(testing) and isMainModule: when true: # Async proc main() {.async.} = var client = newAsyncHttpClient() var resp = await client.request("http://picheta.me") echo("Got response: ", resp.status) echo("Body:\n") echo(resp.body) resp = await client.request("http://picheta.me/asfas.html") echo("Got response: ", resp.status) resp = await client.request("http://picheta.me/aboutme.html") echo("Got response: ", resp.status) resp = await client.request("http://nim-lang.org/") echo("Got response: ", resp.status) resp = await client.request("http://nim-lang.org/download.html") echo("Got response: ", resp.status) waitFor main() else: #downloadFile("http://force7.de/nim/index.html", "nimindex.html") #downloadFile("http://www.httpwatch.com/", "ChunkTest.html") #downloadFile("http://validator.w3.org/check?uri=http%3A%2F%2Fgoogle.com", # "validator.html") #var r = get("http://validator.w3.org/check?uri=http%3A%2F%2Fgoogle.com& # charset=%28detect+automatically%29&doctype=Inline&group=0") var data = newMultipartData() data["output"] = "soap12" data["uploaded_file"] = ("test.html", "text/html", "<html><head></head><body><p>test</p></body></html>") echo postContent("http://validator.w3.org/check", multipart=data)