author | bptato <nincsnevem662@gmail.com> | 2022-07-30 18:28:19 +0200
committer | bptato <nincsnevem662@gmail.com> | 2022-07-30 18:28:19 +0200
commit | ed7afdbfd3686babaa81b4a9f169ba5d60178200 (patch)
tree | 7e02faeca3764ae2ae02a70b417eb44b21ca14c6 /src
parent | b2bf6774fbf3cc924aa7abd182366fe9c5d65279 (diff)
download | chawan-ed7afdbfd3686babaa81b4a9f169ba5d60178200.tar.gz
Implement HTTP Auth
Also use our own redirect handling instead of curl's built-in follow-redirects.
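
For background, the redirect change works by leaving CURLOPT_FOLLOWLOCATION unset so that curl hands the 3xx response back to the caller, whose status code and target can then be read with curl_easy_getinfo. Below is a minimal sketch of that idea in Nim, assuming the bindings declared in src/bindings/curl.nim are importable; the proc name fetchOnce is illustrative and not part of this commit.

```nim
import options
import bindings/curl  # assumed import path; provides CURL, CURLOPT_*, CURLINFO_*, CURLE_OK

proc fetchOnce(surl: string): tuple[status: clong, redirect: Option[string]] =
  ## Perform a single request without following redirects; report the status
  ## code and, for 301/302/303, the redirect target curl saw.
  let curl = curl_easy_init()
  if curl == nil: return
  discard curl_easy_setopt(curl, CURLOPT_URL, cstring(surl))
  # CURLOPT_FOLLOWLOCATION is deliberately left unset, so curl returns the 3xx
  # response instead of chasing the Location header itself. (No write callback
  # is installed in this sketch, so any body goes to stdout; the real loader
  # sets CURLOPT_WRITEFUNCTION.)
  if curl_easy_perform(curl) == CURLE_OK:
    discard curl_easy_getinfo(curl, CURLINFO_RESPONSE_CODE, addr result.status)
    if result.status in [301.clong, 302, 303]:
      var urlp: cstring
      discard curl_easy_getinfo(curl, CURLINFO_REDIRECT_URL, addr urlp)
      if urlp != nil:
        result.redirect = some($urlp)  # the caller decides whether to navigate
  curl_easy_cleanup(curl)
```

The client then treats the returned target like any other navigation: in this commit, followRedirect in src/client.nim re-runs gotoUrl on it and discards the old buffer.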
Diffstat (limited to 'src')
-rw-r--r-- | src/bindings/curl.nim | 23
-rw-r--r-- | src/client.nim | 113
-rw-r--r-- | src/io/loader.nim | 46
-rw-r--r-- | src/types/url.nim | 4
4 files changed, 146 insertions, 40 deletions
diff --git a/src/bindings/curl.nim b/src/bindings/curl.nim
index 64fce485..db120264 100644
--- a/src/bindings/curl.nim
+++ b/src/bindings/curl.nim
@@ -25,6 +25,17 @@ const
   CURLOPTTYPE_CBPOINT = CURLOPTTYPE_OBJECTPOINT
   CURLOPTTYPE_VALUES = CURLOPTTYPE_LONG
 
+const
+  CURLINFO_STRING = 0x100000
+  CURLINFO_LONG = 0x200000
+  CURLINFO_DOUBLE = 0x300000
+  CURLINFO_SLIST = 0x400000
+  CURLINFO_PTR = 0x400000 # same as SLIST
+  CURLINFO_SOCKET = 0x500000
+  CURLINFO_OFF_T = 0x600000
+  CURLINFO_MASK = 0x0fffff
+  CURLINFO_TYPEMASK = 0xf00000
+
 {.push cdecl, dynlib: curllib.}
 
 type
@@ -69,6 +80,15 @@ type
     CURLOPT_PROXY_SSLKEY_BLOB = CURLOPTTYPE_BLOB + 294
     CURLOPT_ISSUECERT_BLOB = CURLOPTTYPE_BLOB + 295
 
+  CURLINFO* {.size: sizeof(cint).} = enum
+    CURLINFO_NONE # first, never use this
+
+    # String
+    CURLINFO_REDIRECT_URL = CURLINFO_STRING + 31
+
+    # Long
+    CURLINFO_RESPONSE_CODE = CURLINFO_LONG + 2
+
   CURLcode* {.size: sizeof(cint).} = enum
     CURLE_OK = 0,
     CURLE_UNSUPPORTED_PROTOCOL, # 1
@@ -202,8 +222,9 @@ proc curl_global_cleanup*() {.importc: "curl_global_cleanup".}
 proc curl_easy_init*(): CURL {.importc: "curl_easy_init".}
 proc curl_easy_cleanup*(handle: CURL) {.importc: "curl_easy_cleanup".}
 
-proc curl_easy_setopt*(handle: CURL, option: CURLoption) {.importc: "curl_easy_setopt", varargs.}
+proc curl_easy_setopt*(handle: CURL, option: CURLoption): CURLcode {.importc: "curl_easy_setopt", varargs.}
 proc curl_easy_perform*(handle: CURL): CURLcode {.importc: "curl_easy_perform".}
+proc curl_easy_getinfo*(handle: CURL, info: CURLINFO): CURLcode {.importc: "curl_easy_getinfo", varargs.}
 
 proc curl_mime_init*(handle: CURL): curl_mime {.importc: "curl_mime_init".}
 proc curl_mime_free*(mime: curl_mime) {.importc: "curl_mime_free".}
diff --git a/src/client.nim b/src/client.nim
index 2ab6281b..480007f1 100644
--- a/src/client.nim
+++ b/src/client.nim
@@ -31,6 +31,8 @@ type
     jsctx: JSContext
     regex: Option[Regex]
    revsearch: bool
+    needsauth: bool
+    redirecturl: Option[Url]
 
  ActionError = object of IOError
  LoadError = object of ActionError
@@ -104,6 +106,18 @@ proc nextBuffer(client: Client) =
     client.buffer = client.buffer.next
     client.buffer.redraw = true
 
+proc discardBuffer(buffer: Buffer) =
+  if buffer.next != nil:
+    if buffer.sourcepair != nil:
+      buffer.sourcepair.sourcepair = nil
+    buffer.next.prev = buffer.prev
+    buffer.redraw = true
+  elif buffer.prev != nil:
+    if buffer.sourcepair != nil:
+      buffer.sourcepair.sourcepair = nil
+    buffer.prev.next = buffer.next
+    buffer.redraw = true
+
 proc discardBuffer(client: Client) =
   if client.buffer.next != nil:
     if client.buffer.sourcepair != nil:
@@ -145,23 +159,29 @@ proc readPipe(client: Client, ctype: string) =
   client.buffer.drawBuffer()
 
 var g_client: Client
-proc gotoUrl(client: Client, url: Url, click = none(ClickAction), prevurl = none(Url), force = false, newbuf = true, ctype = "") =
+proc getPage(client: Client, url: Url, click = none(ClickAction)): LoadResult =
+  let page = if click.isnone:
+    client.loader.getPage(url)
+  else:
+    client.loader.getPage(url, click.get.httpmethod, click.get.mimetype, click.get.body, click.get.multipart)
+  return page
+
+# Load url in a new buffer.
+proc gotoUrl(client: Client, url: Url, click = none(ClickAction), prevurl = none(Url), force = false, ctype = "") =
   setControlCHook(proc() {.noconv.} =
     raise newException(InterruptError, "Interrupted"))
   if force or prevurl.issome or not prevurl.get.equals(url, true):
     try:
-      let page = if click.isnone:
-        client.loader.getPage(url)
-      else:
-        client.loader.getPage(url, click.get.httpmethod, click.get.mimetype, click.get.body, click.get.multipart)
+      let page = client.getPage(url, click)
+      client.needsauth = page.status == 401 # Unauthorized
+      client.redirecturl = page.redirect
       if page.s != nil:
-        if newbuf:
-          client.addBuffer()
-          g_client = client
-          setControlCHook(proc() {.noconv.} =
-            if g_client.buffer.prev != nil or g_client.buffer.next != nil:
-              g_client.discardBuffer()
-            interruptError())
+        client.addBuffer()
+        g_client = client
+        setControlCHook(proc() {.noconv.} =
+          if g_client.buffer.prev != nil or g_client.buffer.next != nil:
+            g_client.discardBuffer()
+          interruptError())
         client.buffer.istream = page.s
         client.buffer.contenttype = if ctype != "": ctype else: page.contenttype
       else:
@@ -174,35 +194,46 @@ proc gotoUrl(client: Client, url: Url, click = none(ClickAction), prevurl = none
   client.buffer.location = url
   client.setupBuffer()
 
-proc gotoUrl(client: Client, url: string, click = none(ClickAction), prevurl = none(Url), force = false, newbuf = true, ctype = "") =
-  var oldurl = prevurl
-  if oldurl.isnone and client.buffer != nil:
-    oldurl = client.buffer.location.some
-  let newurl = parseUrl(url, oldurl)
+# Relative gotoUrl: either to prevurl, or if that's none, client.buffer.url.
+proc gotoUrl(client: Client, url: string, click = none(ClickAction), prevurl = none(Url), force = false, ctype = "") =
+  var prevurl = prevurl
+  if prevurl.isnone and client.buffer != nil:
+    prevurl = client.buffer.location.some
+  let newurl = parseUrl(url, prevurl)
   if newurl.isnone:
     loadError("Invalid URL " & url)
-  client.gotoUrl(newurl.get, click, oldurl, force, newbuf, ctype)
-
+  client.gotoUrl(newurl.get, click, prevurl, force, ctype)
+
+# When the user has passed a partial URL as an argument, they might've meant
+# several things:
+# * the URL as it is
+# * file://$PWD/<file>
+# * https://<url>
+# So we attempt to visit each of them, in the order described above.
 proc loadUrl(client: Client, url: string, ctype = "") =
   let firstparse = parseUrl(url)
   if firstparse.issome:
-    client.gotoUrl(url, none(ClickAction), none(Url), true, true, ctype)
+    client.gotoUrl(url, none(ClickAction), none(Url), true, ctype)
   else:
     let cdir = parseUrl("file://" & getCurrentDir() & DirSep)
     try:
       # attempt to load local file
-      client.gotoUrl(url, none(ClickAction), cdir, true, true, ctype)
+      client.gotoUrl(url, none(ClickAction), cdir, true, ctype)
     except LoadError:
       try:
         # attempt to load local file (this time percent encoded)
-        client.gotoUrl(percentEncode(url, LocalPathPercentEncodeSet), none(ClickAction), cdir, true, true, ctype)
+        client.gotoUrl(percentEncode(url, LocalPathPercentEncodeSet), none(ClickAction), cdir, true, ctype)
       except LoadError:
         # attempt to load remote page
-        client.gotoUrl("http://" & url, none(ClickAction), none(Url), true, true, ctype)
+        client.gotoUrl("http://" & url, none(ClickAction), none(Url), true, ctype)
 
+# Reload the page in a new buffer, then kill the previous buffer.
 proc reloadPage(client: Client) =
-  client.gotoUrl(client.buffer.location, none(ClickAction), none(Url), true, true, client.buffer.contenttype)
+  let buf = client.buffer
+  client.gotoUrl(client.buffer.location, none(ClickAction), none(Url), true, client.buffer.contenttype)
+  discardBuffer(buf)
 
+# Open a URL prompt and visit the specified URL.
 proc changeLocation(client: Client) =
   let buffer = client.buffer
   var url = buffer.location.serialize(true)
@@ -441,6 +472,34 @@ proc input(client: Client) =
  of ACTION_SEARCH_PREV: client.searchPrev()
  else: discard
 
+proc checkAuth(client: Client) =
+  client.statusMode()
+  var username = ""
+  let ustatus = readLine("Username: ", username, client.buffer.width)
+  if not ustatus:
+    client.needsauth = false
+    return
+  client.statusMode()
+  var password = ""
+  let pstatus = readLine("Password: ", password, client.buffer.width)
+  if not pstatus:
+    client.needsauth = false
+    return
+  var url = client.buffer.location
+  url.username = username
+  url.password = password
+  var buf = client.buffer
+  client.gotoUrl(url, prevurl = some(client.buffer.location))
+  discardBuffer(buf)
+
+proc followRedirect(client: Client) =
+  if client.redirecturl.issome:
+    var buf = client.buffer
+    let redirecturl = client.redirecturl.get
+    client.redirecturl = none(Url)
+    client.gotoUrl(redirecturl, prevurl = some(client.buffer.location))
+    discardBuffer(buf)
+
 proc inputLoop(client: Client) =
   while true:
     g_client = client
@@ -450,6 +509,12 @@ proc inputLoop(client: Client) =
        g_client.buffer.reshape = false
        g_client.inputLoop())
     client.buffer.refreshBuffer()
+    while client.redirecturl.issome:
+      client.followRedirect()
+      client.buffer.refreshBuffer()
+    if client.needsauth: # Unauthorized
+      client.checkAuth()
+      client.buffer.refreshBuffer()
     try:
       client.input()
     except ActionError as e:
diff --git a/src/io/loader.nim b/src/io/loader.nim
index f6af4c87..3c894d55 100644
--- a/src/io/loader.nim
+++ b/src/io/loader.nim
@@ -16,6 +16,9 @@ type
   LoadResult* = object
     s*: Stream
     contenttype*: string
+    status*: int
+    headers*: HttpHeaders
+    redirect*: Option[Url]
 
   HeaderResult = ref object
     statusline: bool
@@ -82,30 +85,39 @@ proc curlWriteBody(p: cstring, size: csize_t, nmemb: csize_t, userdata: pointer)
   stream.flush()
   return nmemb
 
+template setopt(curl: CURL, opt: CURLoption, arg: typed) =
+  discard curl_easy_setopt(curl, opt, arg)
+
+template setopt(curl: CURL, opt: CURLoption, arg: string) =
+  discard curl_easy_setopt(curl, opt, cstring(arg))
+
+template getinfo(curl: CURL, info: CURLINFO, arg: typed) =
+  discard curl_easy_getinfo(curl, info, arg)
+
 proc getPageLibcurl(loader: FileLoader, url: Url, smethod: HttpMethod = HttpGet, mimetype = "", body: string = "", multipart: MultipartDataClone = nil): LoadResult =
   let curl = curl_easy_init()
   if curl == nil: return # fail
 
   let surl = url.serialize()
-  curl_easy_setopt(curl, CURLOPT_URL, cstring(surl))
+  curl.setopt(CURLOPT_URL, surl)
 
   var cs = newStringStream()
-  curl_easy_setopt(curl, CURLOPT_WRITEDATA, cs)
-  curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, curlWriteBody)
+  curl.setopt(CURLOPT_WRITEDATA, cs)
+  curl.setopt(CURLOPT_WRITEFUNCTION, curlWriteBody)
 
   let headers = newHttpHeaders(true)
   let headerres = HeaderResult(headers: headers)
-  curl_easy_setopt(curl, CURLOPT_HEADERDATA, headerres)
-  curl_easy_setopt(curl, CURLOPT_HEADERFUNCTION, curlWriteHeader)
-  curl_easy_setopt(curl, CURLOPT_FOLLOWLOCATION, 1)
+  curl.setopt(CURLOPT_HEADERDATA, headerres)
+  curl.setopt(CURLOPT_HEADERFUNCTION, curlWriteHeader)
+  #curl.setopt(CURLOPT_FOLLOWLOCATION, 1)
 
   var
     mime: curl_mime = nil
 
   case smethod
-  of HttpGet: curl_easy_setopt(curl, CURLOPT_HTTPGET, 1)
+  of HttpGet: curl.setopt(CURLOPT_HTTPGET, 1)
   of HttpPost:
-    curl_easy_setopt(curl, CURLOPT_POST, 1)
+    curl.setopt(CURLOPT_POST, 1)
     if multipart != nil:
       mime = curl_mime_init(curl)
       if mime == nil: return # fail
@@ -123,10 +135,10 @@ proc getPageLibcurl(loader: FileLoader, url: Url, smethod: HttpMethod = HttpGet,
          curl_mime_filename(part, cstring(entry.filename))
        else:
          curl_mime_data(part, cstring(entry.content), csize_t(entry.content.len))
-      curl_easy_setopt(curl, CURLOPT_MIMEPOST, mime)
+      curl.setopt(CURLOPT_MIMEPOST, mime)
    elif body != "":
-      curl_easy_setopt(curl, CURLOPT_POSTFIELDS, cstring(body))
-      curl_easy_setopt(curl, CURLOPT_POSTFIELDSIZE, body.len)
+      curl.setopt(CURLOPT_POSTFIELDS, cstring(body))
+      curl.setopt(CURLOPT_POSTFIELDSIZE, body.len)
    else: discard
 
   var requestHeaders = newHttpHeaders(true)
@@ -138,10 +150,10 @@ proc getPageLibcurl(loader: FileLoader, url: Url, smethod: HttpMethod = HttpGet,
       let header = k & ": " & v
       slist = curl_slist_append(slist, cstring(header))
   if slist != nil:
-    curl_easy_setopt(curl, CURLOPT_HTTPHEADER, slist)
+    curl.setopt(CURLOPT_HTTPHEADER, slist)
 
   let res = curl_easy_perform(curl)
-  if res == CURLE_OK: # TODO handle http errors
+  if res == CURLE_OK: # TODO handle errors
     cs.setPosition(0)
     result.s = cs
 
@@ -150,6 +162,13 @@ proc getPageLibcurl(loader: FileLoader, url: Url, smethod: HttpMethod = HttpGet,
       result.contenttype = ct.until(';')
     else:
       result.contenttype = guessContentType(url.path.serialize())
+    discard curl_easy_getinfo(curl, CURLINFO_RESPONSE_CODE, addr result.status)
+    if result.status in {301, 302, 303}: #TODO 300, 304, 307
+      var urlp: cstring
+      curl.getinfo(CURLINFO_REDIRECT_URL, addr urlp)
+      if urlp != nil:
+        let urls = $urlp
+        result.redirect = parseUrl(urls, some(url))
 
   curl_easy_cleanup(curl)
   if mime != nil:
@@ -165,6 +184,7 @@ proc getPage*(loader: FileLoader, url: Url, smethod: HttpMethod = HttpGet, mimet
     let path = url.path.serialize_unicode()
     result.contenttype = guessContentType(path)
     result.s = newFileStream(path, fmRead)
+    result.status = 200 # doesn't make much sense...
   elif url.scheme == "http" or url.scheme == "https":
     return getPageLibcurl(loader, url, smethod, mimetype, body, cast[MultipartDataClone](multipart))
 
diff --git a/src/types/url.nim b/src/types/url.nim
index afe7d83a..7f41025b 100644
--- a/src/types/url.nim
+++ b/src/types/url.nim
@@ -37,8 +37,8 @@ type
   Url* = object
     encoding: int #TODO
     scheme*: string
-    username: string
-    password: string
+    username*: string
+    password*: string
     port: Option[uint16]
     host: Option[Host]
     path*: UrlPath
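
As a closing illustration of the HTTP auth half of the commit: on a 401 the client re-requests the page with the credentials placed in the URL's userinfo, which libcurl then uses to authenticate. The following is a hedged sketch of that retry flow, assuming the getPage/LoadResult API from src/io/loader.nim and the now-exported username/password fields of Url; the proc name fetchWithAuth and the assumption that src/ is on the compiler's search path are illustrative only.

```nim
import io/loader, types/url  # assumed: src/ is on the search path

proc fetchWithAuth(loader: FileLoader, url: Url, user, pass: string): LoadResult =
  ## Fetch a page; on 401 Unauthorized, retry once with the given credentials
  ## stored in the URL's userinfo, which libcurl picks up for authentication.
  result = loader.getPage(url)
  if result.status == 401:
    var authed = url        # Url is a value object, so this is a copy
    authed.username = user
    authed.password = pass
    result = loader.getPage(authed)
```

In the commit itself the prompting happens in checkAuth, which then calls gotoUrl with the credential-bearing URL and discards the previous buffer via discardBuffer.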