about summary refs log tree commit diff stats
path: root/src
diff options
context:
space:
mode:
Diffstat (limited to 'src')
-rw-r--r--src/config/config.nim16
-rw-r--r--src/loader/loader.nim33
-rw-r--r--src/loader/request.nim4
-rw-r--r--src/local/pager.nim26
4 files changed, 32 insertions(+), 47 deletions(-)
diff --git a/src/config/config.nim b/src/config/config.nim
index 432f5875..a01187a7 100644
--- a/src/config/config.nim
+++ b/src/config/config.nim
@@ -47,6 +47,7 @@ type
     images*: Opt[bool]
     stylesheet*: Opt[string]
     proxy*: Opt[URL]
+    default_headers*: Opt[Table[string, string]]
 
   OmniRule* = object
     match*: Regex
@@ -87,7 +88,7 @@ type
     max_redirect* {.jsgetset.}: int32
     prepend_https* {.jsgetset.}: bool
     prepend_scheme* {.jsgetset.}: string
-    proxy* {.jsgetset.}: Opt[string]
+    proxy* {.jsgetset.}: URL
     default_headers* {.jsgetset.}: Table[string, string]
 
   DisplayConfig = object
@@ -249,19 +250,6 @@ proc bindLineKey(config: Config; key, action: string) {.jsfunc.} =
 proc hasprop(a: ptr ActionMap; s: string): bool {.jshasprop.} =
   return s in a[]
 
-func getProxy*(config: Config): URL =
-  if config.network.proxy.isSome:
-    let s = config.network.proxy.get
-    let x = parseURL(s)
-    if x.isSome:
-      return x.get
-    else:
-      raise newException(ValueError, "Invalid proxy URL: " & s)
-  return nil
-
-func getDefaultHeaders*(config: Config): Headers =
-  return newHeaders(config.network.default_headers)
-
 proc openFileExpand(dir, file: string): FileStream =
   if file.len == 0:
     return nil
diff --git a/src/loader/loader.nim b/src/loader/loader.nim
index 6cc37b71..9c9c2e8a 100644
--- a/src/loader/loader.nim
+++ b/src/loader/loader.nim
@@ -429,6 +429,21 @@ proc loadResource(ctx: LoaderContext; client: ClientData; request: Request;
   if tries >= MaxRewrites:
     handle.rejectHandle(ERROR_TOO_MANY_REWRITES)
 
+proc setupRequestDefaults*(request: Request; config: LoaderClientConfig) =
+  request.defaultHeadersSet = true
+  for k, v in config.defaultHeaders.table:
+    if k notin request.headers.table:
+      request.headers.table[k] = v
+  if config.cookieJar != nil and config.cookieJar.cookies.len > 0:
+    if "Cookie" notin request.headers.table:
+      let cookie = config.cookieJar.serialize(request.url)
+      if cookie != "":
+        request.headers["Cookie"] = cookie
+  if request.referrer != nil and "Referer" notin request.headers:
+    let r = request.referrer.getReferrer(request.url, config.referrerPolicy)
+    if r != "":
+      request.headers["Referer"] = r
+
 proc onLoad(ctx: LoaderContext; stream: SocketStream; client: ClientData) =
   var request: Request
   stream.sread(request)
@@ -440,20 +455,10 @@ proc onLoad(ctx: LoaderContext; stream: SocketStream; client: ClientData) =
   if not client.config.filter.match(request.url):
     handle.rejectHandle(ERROR_DISALLOWED_URL)
   else:
-    for k, v in client.config.defaultHeaders.table:
-      if k notin request.headers.table:
-        request.headers.table[k] = v
-    let cookieJar = client.config.cookieJar
-    if cookieJar != nil and cookieJar.cookies.len > 0:
-      if "Cookie" notin request.headers.table:
-        let cookie = cookieJar.serialize(request.url)
-        if cookie != "":
-          request.headers["Cookie"] = cookie
-    if request.referrer != nil and "Referer" notin request.headers:
-      let r = request.referrer.getReferrer(request.url,
-        client.config.referrerPolicy)
-      if r != "":
-        request.headers["Referer"] = r
+    if ctx.pagerClient != client or not request.defaultHeadersSet:
+      # do not override defaults for pager, because it starts requests that
+      # later belong to buffers.
+      request.setupRequestDefaults(client.config)
     if request.proxy == nil or not client.config.acceptProxy:
       request.proxy = client.config.proxy
     ctx.loadResource(client, request, handle)
diff --git a/src/loader/request.nim b/src/loader/request.nim
index 05b02114..60632041 100644
--- a/src/loader/request.nim
+++ b/src/loader/request.nim
@@ -81,6 +81,10 @@ type
     # when set to true, the loader will not write data from the body (not
     # headers!) into the output until a resume is received.
     suspended*: bool
+    # if defaultHeadersSet is set, then loader will not set default headers
+    # for the request if it was received from the pager. (this is used when
+    # starting requests for new buffers.)
+    defaultHeadersSet*: bool
 
 jsDestructor(Request)
 
diff --git a/src/local/pager.nim b/src/local/pager.nim
index 31652817..b8041236 100644
--- a/src/local/pager.nim
+++ b/src/local/pager.nim
@@ -40,7 +40,6 @@ import types/cell
 import types/color
 import types/cookie
 import types/opt
-import types/referrer
 import types/url
 import types/winattrs
 import utils/strwidth
@@ -123,7 +122,6 @@ type
     numload*: int # number of pages currently being loaded
     precnum*: int32 # current number prefix (when vi-numeric-prefix is true)
     procmap*: seq[ProcMapItem]
-    proxy: URL
     redraw: bool
     regex: Opt[Regex]
     reverseSearch: bool
@@ -276,7 +274,6 @@ proc newPager*(config: Config; forkserver: ForkServer; ctx: JSContext;
   return Pager(
     config: config,
     forkserver: forkserver,
-    proxy: config.getProxy(),
     term: newTerminal(stdout, config),
     alerts: alerts
   )
@@ -298,8 +295,8 @@ proc setLoader*(pager: Pager, loader: FileLoader) =
   pager.devRandom = newPosixStream("/dev/urandom", O_RDONLY, 0)
   pager.loader = loader
   let config = LoaderClientConfig(
-    defaultHeaders: pager.config.getDefaultHeaders(),
-    proxy: pager.config.getProxy(),
+    defaultHeaders: newHeaders(pager.config.network.default_headers),
+    proxy: pager.config.network.proxy,
     filter: newURLFilter(default = true),
   )
   loader.key = pager.addLoaderClient(pager.loader.clientPid, config)
@@ -506,18 +503,7 @@ proc newContainer(pager: Pager; bufferConfig: BufferConfig;
     contentType = none(string); charsetStack: seq[Charset] = @[];
     url = request.url; cacheId = -1; cacheFile = ""): Container =
   request.suspended = true
-  if loaderConfig.cookieJar != nil:
-    # loader stores cookie jars per client, but we have no client yet.
-    # therefore we must set cookie here
-    let cookie = loaderConfig.cookieJar.serialize(request.url)
-    if cookie != "":
-      request.headers["Cookie"] = cookie
-  if request.referrer != nil:
-    # same with referrer
-    let r = request.referrer.getReferrer(request.url,
-      loaderConfig.referrerPolicy)
-    if r != "":
-      request.headers["Referer"] = r
+  request.setupRequestDefaults(loaderConfig)
   let stream = pager.loader.startRequest(request)
   pager.loader.registerFun(stream.fd)
   let container = newContainer(
@@ -884,12 +870,12 @@ proc applySiteconf(pager: Pager; url: var URL; charsetOverride: Charset;
   let host = url.host
   var referer_from = false
   var cookieJar: CookieJar = nil
-  var headers = pager.config.getDefaultHeaders()
+  var headers = newHeaders(pager.config.network.default_headers)
   var scripting = false
   var images = false
   var charsets = pager.config.encoding.document_charset
   var userstyle = pager.config.css.stylesheet
-  var proxy = pager.proxy
+  var proxy = pager.config.network.proxy
   for sc in pager.config.siteconf:
     if sc.url.isSome and not sc.url.get.match($url):
       continue
@@ -922,6 +908,8 @@ proc applySiteconf(pager: Pager; url: var URL; charsetOverride: Charset;
       userstyle &= sc.stylesheet.get
     if sc.proxy.isSome:
       proxy = sc.proxy.get
+    if sc.default_headers.isSome:
+      headers = newHeaders(sc.default_headers.get)
   loaderConfig = LoaderClientConfig(
     defaultHeaders: headers,
     cookiejar: cookieJar,