author     bptato <nincsnevem662@gmail.com>  2022-08-04 21:39:16 +0200
committer  bptato <nincsnevem662@gmail.com>  2022-08-04 21:39:16 +0200
commit     e7ea9c408667a4fdfefc369e51d72c3cfb9c1ee9 (patch)
tree       45847ebb77cab10981fa0b47b9cff6f8a3fcd0a5 /src/io/loader.nim
parent     7f537f7991c384fd88fb1fecf6aa5c05edb6baee (diff)
download   chawan-e7ea9c408667a4fdfefc369e51d72c3cfb9c1ee9.tar.gz
Remove httpclient dependency
Diffstat (limited to 'src/io/loader.nim')
-rw-r--r--  src/io/loader.nim  81
1 file changed, 58 insertions, 23 deletions
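
The change drops the stdlib httpclient types in favour of small in-tree replacements: a local HttpMethod enum, HttpHeaderList instead of HttpHeaders, and MimePart/MimeData instead of the MultipartEntryClone/MultipartDataClone copies, so the loader only depends on the bindings/curl module. From a caller's point of view the API keeps its shape. The sketch below is illustrative only: it uses names taken from the diff (newFileLoader, getPage, MimeData, HTTP_POST) and assumes src/ is on the compiler's search path; it is not code from the repository.

  import io/loader

  let loader = newFileLoader()            # starts from DefaultHeaders

  # Plain GET; smethod defaults to HTTP_GET.
  let page = loader.getPage("https://example.org")
  echo page.status
  echo page.contenttype

  # Multipart POST, now built with the local MimeData object
  # instead of std/httpclient's MultipartData.
  var form = MimeData()
  form["q"] = "chawan"
  let res = loader.getPage("https://example.org/search",
    smethod = HTTP_POST, multipart = form)
  echo res.status
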
diff --git a/src/io/loader.nim b/src/io/loader.nim
index 0155ac42..2efc15c2 100644
--- a/src/io/loader.nim
+++ b/src/io/loader.nim
@@ -1,7 +1,7 @@
-import httpclient
 import options
 import streams
 import strutils
+import tables
 
 import bindings/curl
 import types/mime
@@ -9,41 +9,74 @@ import types/url
 import utils/twtstr
 
 type
+  HttpMethod* = enum
+    HTTP_CONNECT, HTTP_DELETE, HTTP_GET, HTTP_HEAD, HTTP_OPTIONS, HTTP_PATCH,
+    HTTP_POST, HTTP_PUT, HTTP_TRACE
+
+type
   FileLoader* = ref object
-    headers*: HttpHeaders
+    headers*: HttpHeaderList
+
+  HttpHeaderList = ref object
+    table: Table[string, seq[string]]
 
   LoadResult* = object
     s*: Stream
     contenttype*: string
     status*: int
-    headers*: HttpHeaders
+    headers*: HttpHeaderList
     redirect*: Option[Url]
 
   HeaderResult = ref object
     statusline: bool
-    headers: HttpHeaders
+    headers: HttpHeaderList
 
 const DefaultHeaders = {
   "User-Agent": "chawan",
   "Accept": "text/html,text/*;q=0.5",
   "Accept-Language": "en;q=1.0",
   "Pragma": "no-cache",
-  "Cache-control": "no-cache",
+  "Cache-Control": "no-cache",
 }
 
-proc newFileLoader*(headers: HttpHeaders): FileLoader =
+proc newFileLoader*(headers: HttpHeaderList): FileLoader =
   new(result)
   result.headers = headers
 
+proc newHttpHeaderList*(): HttpHeaderList =
+  new(result)
+
+proc add(headers: HttpHeaderList, k, v: string) =
+  let k = k.toHeaderCase()
+  if k notin headers.table:
+    headers.table[k] = @[v]
+  else:
+    headers.table[k].add(v)
+
+proc `[]=`(headers: HttpHeaderList, k, v: string) =
+  headers.table[k.toHeaderCase()] = @[v]
+
+iterator pairs(headers: HttpHeaderList): (string, string) =
+  for k, vs in headers.table:
+    for v in vs:
+      yield (k, v)
+
 proc newFileLoader*(): FileLoader =
-  var headers = newHttpHeaders(true)
+  var headers = new(HttpHeaderList)
   for header in DefaultHeaders:
     headers[header[0]] = header[1]
   newFileLoader(headers)
 
-#TODO replace this with our own multipart object
+proc getOrDefault*(headers: HttpHeaderList, k: string): string =
+  let k = k.toHeaderCase()
+  if k in headers.table:
+    headers.table[k][0]
+  else:
+    k
+
+# Originally from the stdlib
 type
-  MultipartEntryClone* = object
+  MimePart* = object
     name, content: string
     case isFile: bool
     of true:
@@ -52,8 +85,11 @@ type
       isStream: bool
     else: discard
 
-  MultipartDataClone* = ref object
-    content: seq[MultipartEntryClone]
+  MimeData* = ref object
+    content: seq[MimePart]
+
+proc `[]=`*(multipart: MimeData, k, v: string) =
+  multipart.content.add(MimePart(name: k, content: v))
 
 proc curlWriteHeader(p: cstring, size: csize_t, nitems: csize_t, userdata: pointer): csize_t {.cdecl.} =
   var line = newString(nitems)
@@ -63,12 +99,12 @@ proc curlWriteHeader(p: cstring, size: csize_t, nitems: csize_t, userdata: point
   let headers = cast[HeaderResult](userdata)
   if not headers.statusline:
     headers.statusline = true
-    return nitems #TODO handle status line
+    return nitems #TODO handle status line?
 
   let k = line.until(':')
 
   if k.len == line.len:
-    return nitems # probably empty line, or invalid
+    return nitems # empty line (last, before body) or invalid (=> error)
 
   let v = line.substr(k.len + 1).strip()
   headers.headers.add(k, v)
@@ -92,7 +128,7 @@ template setopt(curl: CURL, opt: CURLoption, arg: string) =
 template getinfo(curl: CURL, info: CURLINFO, arg: typed) =
   discard curl_easy_getinfo(curl, info, arg)
 
-proc getPageLibcurl(loader: FileLoader, url: Url, smethod: HttpMethod = HttpGet, mimetype = "", body: string = "", multipart: MultipartDataClone = nil): LoadResult =
+proc getPageLibcurl(loader: FileLoader, url: Url, smethod: HttpMethod = HTTP_GET, mimetype = "", body: string = "", multipart: MimeData = nil): LoadResult =
   let curl = curl_easy_init()
 
   if curl == nil: return # fail
@@ -104,17 +140,16 @@ proc getPageLibcurl(loader: FileLoader, url: Url, smethod: HttpMethod = HttpGet,
   curl.setopt(CURLOPT_WRITEDATA, cs)
   curl.setopt(CURLOPT_WRITEFUNCTION, curlWriteBody)
 
-  let headers = newHttpHeaders(true)
+  let headers = newHttpHeaderList()
   let headerres = HeaderResult(headers: headers)
   curl.setopt(CURLOPT_HEADERDATA, headerres)
   curl.setopt(CURLOPT_HEADERFUNCTION, curlWriteHeader)
-  #curl.setopt(CURLOPT_FOLLOWLOCATION, 1)
 
   var mime: curl_mime = nil
 
   case smethod
-  of HttpGet: curl.setopt(CURLOPT_HTTPGET, 1)
-  of HttpPost:
+  of HTTP_GET: curl.setopt(CURLOPT_HTTPGET, 1)
+  of HTTP_POST:
     curl.setopt(CURLOPT_POST, 1)
     if multipart != nil:
       mime = curl_mime_init(curl)
@@ -137,9 +172,9 @@ proc getPageLibcurl(loader: FileLoader, url: Url, smethod: HttpMethod = HttpGet,
     elif body != "":
       curl.setopt(CURLOPT_POSTFIELDS, cstring(body))
       curl.setopt(CURLOPT_POSTFIELDSIZE, body.len)
-  else: discard
+  else: discard #TODO
 
-  var requestHeaders = newHttpHeaders(true)
+  var requestHeaders = newHttpHeaderList()
   requestHeaders.table = loader.headers.table
   if mimetype != "":
     requestHeaders["Content-Type"] = mimetype
@@ -174,7 +209,7 @@ proc getPageLibcurl(loader: FileLoader, url: Url, smethod: HttpMethod = HttpGet,
   if slist != nil:
     curl_slist_free_all(slist)
 
-proc getPage*(loader: FileLoader, url: Url, smethod: HttpMethod = HttpGet, mimetype = "", body: string = "", multipart: MultipartData = nil): LoadResult =
+proc getPage*(loader: FileLoader, url: Url, smethod: HttpMethod = HTTP_GET, mimetype = "", body: string = "", multipart: MimeData = nil): LoadResult =
   if url.scheme == "file":
     when defined(windows) or defined(OS2) or defined(DOS):
       let path = url.path.serialize_unicode_dos()
@@ -184,9 +219,9 @@ proc getPage*(loader: FileLoader, url: Url, smethod: HttpMethod = HttpGet, mimet
     result.s = newFileStream(path, fmRead)
     result.status = 200 # doesn't make much sense...
   elif url.scheme == "http" or url.scheme == "https":
-    return getPageLibcurl(loader, url, smethod, mimetype, body, cast[MultipartDataClone](multipart))
+    return getPageLibcurl(loader, url, smethod, mimetype, body, multipart)
 
-proc getPage*(loader: FileLoader, url: string, smethod: HttpMethod = HttpGet, mimetype = "", body: string = "", multipart: MultipartData = nil): LoadResult =
+proc getPage*(loader: FileLoader, url: string, smethod: HttpMethod = HTTP_GET, mimetype = "", body: string = "", multipart: MimeData = nil): LoadResult =
   let url = parseUrl(url)
   if url.isnone:
     raise newException(Exception, "Invalid URL")
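
One detail worth noting about the replacement header type: HttpHeaderList keeps every value seen for a key and folds key casing through toHeaderCase from utils/twtstr. The self-contained sketch below mirrors that behaviour; the toHeaderCase stub is an assumption standing in for the real helper, which is defined outside this diff.

  import tables
  import strutils

  type HttpHeaderList = ref object
    table: Table[string, seq[string]]

  # Stand-in for utils/twtstr.toHeaderCase; assumed to canonicalize
  # "cache-control" into "Cache-Control" and the like.
  proc toHeaderCase(s: string): string =
    var cap = true
    for c in s:
      if cap:
        result.add(c.toUpperAscii)
      else:
        result.add(c.toLowerAscii)
      cap = c == '-'

  # Same shape as the add proc introduced above: multi-valued,
  # case-normalized keys.
  proc add(headers: HttpHeaderList, k, v: string) =
    let k = k.toHeaderCase()
    if k notin headers.table:
      headers.table[k] = @[v]
    else:
      headers.table[k].add(v)

  var headers = HttpHeaderList()
  headers.add("accept-language", "en;q=1.0")
  headers.add("Accept-Language", "hu;q=0.5")
  # Both values land under one canonical key.
  doAssert headers.table["Accept-Language"] == @["en;q=1.0", "hu;q=0.5"]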