Diffstat (limited to 'tools')
-rw-r--r--  tools/atlas/atlas.md  77
-rw-r--r--  tools/atlas/atlas.nim  559
-rw-r--r--  tools/atlas/osutils.nim  51
-rw-r--r--  tools/atlas/packagesjson.nim  114
-rw-r--r--  tools/atlas/parse_requires.nim  2
-rw-r--r--  tools/atlas/testdata.nim  63
-rw-r--r--  tools/atlas/tests/balls.nimble  32
-rw-r--r--  tools/atlas/tests/grok.nimble  5
-rw-r--r--  tools/atlas/tests/nim-bytes2human.nimble  7
-rw-r--r--  tools/atlas/tests/nim.cfg  11
-rw-r--r--  tools/atlas/tests/npeg.nimble  48
-rw-r--r--  tools/atlas/tests/packages/packages.json  36
-rw-r--r--  tools/atlas/tests/sync.nimble  10
-rw-r--r--  tools/atlas/tests/testes.nimble  23
-rw-r--r--  tools/atlas/tests/ups.nimble  13
-rw-r--r--  tools/debug/customdebugtype.nim  72
-rw-r--r--  tools/debug/nim-gdb.py (renamed from tools/nim-gdb.py)  409
-rw-r--r--  tools/debug/nimlldb.py  1380
-rw-r--r--  tools/detect/detect.nim  2
-rw-r--r--  tools/dochack/dochack.nim  52
-rw-r--r--  tools/grammar_nanny.nim  1
-rw-r--r--  tools/kochdocs.nim  131
-rw-r--r--  tools/nimgrab.nim  13
-rw-r--r--  tools/nimgrep.nim.cfg  4
-rw-r--r--  tools/niminst/buildsh.nimf  1
-rw-r--r--  tools/niminst/makefile.nimf  4
-rw-r--r--  tools/niminst/niminst.nim  29
-rw-r--r--  tools/officialpackages.nim  21
-rw-r--r--  tools/unicode_parsedata.nim  70
-rw-r--r--  tools/vccexe/vccexe.nim  20
-rw-r--r--  tools/vccexe/vcvarsall.nim  7
31 files changed, 1848 insertions, 1419 deletions
diff --git a/tools/atlas/atlas.md b/tools/atlas/atlas.md
deleted file mode 100644
index a36817dc5..000000000
--- a/tools/atlas/atlas.md
+++ /dev/null
@@ -1,77 +0,0 @@
-# Atlas Package Cloner
-
-Atlas is a simple package cloner tool that automates some of the
-workflows and needs for Nim's stdlib evolution.
-
-Atlas is compatible with Nimble in the sense that it supports the Nimble
-file format.
-
-
-## How it works
-
-Atlas uses git commits internally; version requirements are translated
-to git commits via `git show-ref --tags`.
-
-Atlas uses URLs internally; Nimble package names are translated to URLs
-via Nimble's  `packages.json` file.
-
-Atlas does not call the Nim compiler for a build, instead it creates/patches
-a `nim.cfg` file for the compiler. For example:
-
-```
-############# begin Atlas config section ##########
---noNimblePath
---path:"../nimx"
---path:"../sdl2/src"
---path:"../opengl/src"
-############# end Atlas config section   ##########
-```
-
-The version selection is deterministic, it picks up the *minimum* required
-version. Thanks to this design, lock files are not required.
-
-
-## Dependencies
-
-Dependencies are neither installed globally, nor locally into the current
-project. Instead a "workspace" is used. The workspace is the nearest parent
-directory of the current directory that does not contain a `.git` subdirectory.
-Dependencies are managed as **siblings**, not as children. Dependencies are
-kept as git repositories.
-
-Thanks to this setup, it's easy to develop multiple projects at the same time.
-
-A project plus its dependencies are stored in a workspace:
-
-  $workspace / main project
-  $workspace / dependency A
-  $workspace / dependency B
-
-
-No attempts are being made at keeping directory hygiene inside the
-workspace, you're supposed to create appropriate `$workspace` directories
-at your own leisure.
-
-
-## Commands
-
-Atlas supports the following commands:
-
-
-### Clone <url>
-
-Clones a URL and all of its dependencies (recursively) into the workspace.
-Creates or patches a `nim.cfg` file with the required `--path` entries.
-
-
-### Clone <package name>
-
-The `<package name>` is translated into an URL via `packages.json` and
-then `clone <url>` is performed.
-
-
-### Search <term term2 term3 ...>
-
-Search the package index `packages.json` for a package that the given terms
-in its description (or name or list of tags).
-
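A reading aid, not part of the patch: the deleted atlas.md above defines the workspace as the nearest parent directory of the current directory that does not contain a `.git` subdirectory. Below is a minimal sketch of that rule; the helper name is invented here, while the real logic sits inline in `main` of the deleted atlas.nim that follows.

```
import std/os

# Walk upwards from `start` until a directory without a `.git`
# subdirectory is found; that directory is the workspace.
proc findWorkspace(start: string): string =
  result = start
  while result.len > 0 and dirExists(result / ".git"):
    result = result.parentDir()

when isMainModule:
  echo "Using workspace ", findWorkspace(getCurrentDir())
```
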
diff --git a/tools/atlas/atlas.nim b/tools/atlas/atlas.nim
deleted file mode 100644
index 274e94517..000000000
--- a/tools/atlas/atlas.nim
+++ /dev/null
@@ -1,559 +0,0 @@
-#
-#           Atlas Package Cloner
-#        (c) Copyright 2021 Andreas Rumpf
-#
-#    See the file "copying.txt", included in this
-#    distribution, for details about the copyright.
-#
-
-## Simple tool to automate frequent workflows: Can "clone"
-## a Nimble dependency and its dependencies recursively.
-
-import std/[parseopt, strutils, os, osproc, unicode, tables, sets, json, jsonutils]
-import parse_requires, osutils, packagesjson
-
-const
-  Version = "0.2"
-  Usage = "atlas - Nim Package Cloner Version " & Version & """
-
-  (c) 2021 Andreas Rumpf
-Usage:
-  atlas [options] [command] [arguments]
-Command:
-  clone url|pkgname     clone a package and all of its dependencies
-  install proj.nimble   use the .nimble file to setup the project's dependencies
-  search keyw keywB...  search for package that contains the given keywords
-  extract file.nimble   extract the requirements and custom commands from
-                        the given Nimble file
-
-Options:
-  --keepCommits         do not perform any `git checkouts`
-  --cfgHere             also create/maintain a nim.cfg in the current
-                        working directory
-  --workspace=DIR       use DIR as workspace
-  --version             show the version
-  --help                show this help
-"""
-
-proc writeHelp() =
-  stdout.write(Usage)
-  stdout.flushFile()
-  quit(0)
-
-proc writeVersion() =
-  stdout.write(Version & "\n")
-  stdout.flushFile()
-  quit(0)
-
-const
-  MockupRun = defined(atlasTests)
-  TestsDir = "tools/atlas/tests"
-
-type
-  PackageName = distinct string
-  DepRelation = enum
-    normal, strictlyLess, strictlyGreater
-
-  Dependency = object
-    name: PackageName
-    url, commit: string
-    rel: DepRelation # "requires x < 1.0" is silly, but Nimble allows it so we have too.
-  AtlasContext = object
-    projectDir, workspace: string
-    hasPackageList: bool
-    keepCommits: bool
-    cfgHere: bool
-    p: Table[string, string] # name -> url mapping
-    processed: HashSet[string] # the key is (url / commit)
-    errors: int
-    when MockupRun:
-      currentDir: string
-      step: int
-      mockupSuccess: bool
-
-const
-  InvalidCommit = "<invalid commit>"
-  ProduceTest = false
-
-type
-  Command = enum
-    GitDiff = "git diff",
-    GitTags = "git show-ref --tags",
-    GitRevParse = "git rev-parse",
-    GitCheckout = "git checkout",
-    GitPull = "git pull",
-    GitCurrentCommit = "git log -n 1 --format=%H"
-    GitMergeBase = "git merge-base"
-
-include testdata
-
-proc exec(c: var AtlasContext; cmd: Command; args: openArray[string]): (string, int) =
-  when MockupRun:
-    assert TestLog[c.step].cmd == cmd, $(TestLog[c.step].cmd, cmd)
-    case cmd
-    of GitDiff, GitTags, GitRevParse, GitPull, GitCurrentCommit:
-      result = (TestLog[c.step].output, TestLog[c.step].exitCode)
-    of GitCheckout:
-      assert args[0] == TestLog[c.step].output
-    of GitMergeBase:
-      let tmp = TestLog[c.step].output.splitLines()
-      assert tmp.len == 4, $tmp.len
-      assert tmp[0] == args[0]
-      assert tmp[1] == args[1]
-      assert tmp[3] == ""
-      result[0] = tmp[2]
-      result[1] = TestLog[c.step].exitCode
-    inc c.step
-  else:
-    var cmdLine = $cmd
-    for i in 0..<args.len:
-      cmdLine.add ' '
-      cmdLine.add quoteShell(args[i])
-    result = osproc.execCmdEx(cmdLine)
-    when ProduceTest:
-      echo "cmd ", cmd, " args ", args, " --> ", result
-
-proc cloneUrl(c: var AtlasContext; url, dest: string; cloneUsingHttps: bool): string =
-  when MockupRun:
-    result = ""
-  else:
-    result = osutils.cloneUrl(url, dest, cloneUsingHttps)
-    when ProduceTest:
-      echo "cloned ", url, " into ", dest
-
-template withDir*(c: var AtlasContext; dir: string; body: untyped) =
-  when MockupRun:
-    c.currentDir = dir
-    body
-  else:
-    let oldDir = getCurrentDir()
-    try:
-      when ProduceTest:
-        echo "Current directory is now ", dir
-      setCurrentDir(dir)
-      body
-    finally:
-      setCurrentDir(oldDir)
-
-proc extractRequiresInfo(c: var AtlasContext; nimbleFile: string): NimbleFileInfo =
-  result = extractRequiresInfo(nimbleFile)
-  when ProduceTest:
-    echo "nimble ", nimbleFile, " info ", result
-
-proc toDepRelation(s: string): DepRelation =
-  case s
-  of "<": strictlyLess
-  of ">": strictlyGreater
-  else: normal
-
-proc isCleanGit(c: var AtlasContext; dir: string): string =
-  result = ""
-  let (outp, status) = exec(c, GitDiff, [])
-  if outp.len != 0:
-    result = "'git diff' not empty"
-  elif status != 0:
-    result = "'git diff' returned non-zero"
-
-proc message(c: var AtlasContext; category: string; p: PackageName; args: varargs[string]) =
-  var msg = category & "(" & p.string & ")"
-  for a in args:
-    msg.add ' '
-    msg.add a
-  stdout.writeLine msg
-  inc c.errors
-
-proc warn(c: var AtlasContext; p: PackageName; args: varargs[string]) =
-  message(c, "[Warning] ", p, args)
-
-proc error(c: var AtlasContext; p: PackageName; args: varargs[string]) =
-  message(c, "[Error] ", p, args)
-
-proc sameVersionAs(tag, ver: string): bool =
-  const VersionChars = {'0'..'9', '.'}
-
-  proc safeCharAt(s: string; i: int): char {.inline.} =
-    if i >= 0 and i < s.len: s[i] else: '\0'
-
-  let idx = find(tag, ver)
-  if idx >= 0:
-    # we found the version as a substring inside the `tag`. But we
-    # need to watch out the the boundaries are not part of a
-    # larger/different version number:
-    result = safeCharAt(tag, idx-1) notin VersionChars and
-      safeCharAt(tag, idx+ver.len) notin VersionChars
-
-proc versionToCommit(c: var AtlasContext; d: Dependency): string =
-  let (outp, status) = exec(c, GitTags, [])
-  if status == 0:
-    var useNextOne = false
-    for line in splitLines(outp):
-      let commitsAndTags = strutils.splitWhitespace(line)
-      if commitsAndTags.len == 2:
-        case d.rel
-        of normal:
-          if commitsAndTags[1].sameVersionAs(d.commit):
-            return commitsAndTags[0]
-        of strictlyLess:
-          if d.commit == InvalidCommit or not commitsAndTags[1].sameVersionAs(d.commit):
-            return commitsAndTags[0]
-        of strictlyGreater:
-          if commitsAndTags[1].sameVersionAs(d.commit):
-            useNextOne = true
-          elif useNextOne:
-            return commitsAndTags[0]
-
-  return ""
-
-proc shortToCommit(c: var AtlasContext; short: string): string =
-  let (cc, status) = exec(c, GitRevParse, [short])
-  result = if status == 0: strutils.strip(cc) else: ""
-
-proc checkoutGitCommit(c: var AtlasContext; p: PackageName; commit: string) =
-  let (_, status) = exec(c, GitCheckout, [commit])
-  if status != 0:
-    error(c, p, "could not checkout commit", commit)
-
-proc gitPull(c: var AtlasContext; p: PackageName) =
-  let (_, status) = exec(c, GitPull, [])
-  if status != 0:
-    error(c, p, "could not 'git pull'")
-
-proc updatePackages(c: var AtlasContext) =
-  if dirExists(c.workspace / PackagesDir):
-    withDir(c, c.workspace / PackagesDir):
-      gitPull(c, PackageName PackagesDir)
-  else:
-    withDir c, c.workspace:
-      let err = cloneUrl(c, "https://github.com/nim-lang/packages", PackagesDir, false)
-      if err != "":
-        error c, PackageName(PackagesDir), err
-
-proc fillPackageLookupTable(c: var AtlasContext) =
-  if not c.hasPackageList:
-    c.hasPackageList = true
-    when not MockupRun:
-      updatePackages(c)
-    let plist = getPackages(when MockupRun: TestsDir else: c.workspace)
-    for entry in plist:
-      c.p[unicode.toLower entry.name] = entry.url
-
-proc toUrl(c: var AtlasContext; p: string): string =
-  if p.isUrl:
-    result = p
-  else:
-    fillPackageLookupTable(c)
-    result = c.p.getOrDefault(unicode.toLower p)
-  if result.len == 0:
-    inc c.errors
-
-proc toName(p: string): PackageName =
-  if p.isUrl:
-    result = PackageName splitFile(p).name
-  else:
-    result = PackageName p
-
-proc needsCommitLookup(commit: string): bool {.inline.} =
-  '.' in commit or commit == InvalidCommit
-
-proc isShortCommitHash(commit: string): bool {.inline.} =
-  commit.len >= 4 and commit.len < 40
-
-proc checkoutCommit(c: var AtlasContext; w: Dependency) =
-  let dir = c.workspace / w.name.string
-  withDir c, dir:
-    if w.commit.len == 0 or cmpIgnoreCase(w.commit, "head") == 0:
-      gitPull(c, w.name)
-    else:
-      let err = isCleanGit(c, dir)
-      if err != "":
-        warn c, w.name, err
-      else:
-        let requiredCommit =
-          if needsCommitLookup(w.commit): versionToCommit(c, w)
-          elif isShortCommitHash(w.commit): shortToCommit(c, w.commit)
-          else: w.commit
-        let (cc, status) = exec(c, GitCurrentCommit, [])
-        let currentCommit = strutils.strip(cc)
-        if requiredCommit == "" or status != 0:
-          if requiredCommit == "" and w.commit == InvalidCommit:
-            warn c, w.name, "package has no tagged releases"
-          else:
-            warn c, w.name, "cannot find specified version/commit", w.commit
-        else:
-          if currentCommit != requiredCommit:
-            # checkout the later commit:
-            # git merge-base --is-ancestor <commit> <commit>
-            let (cc, status) = exec(c, GitMergeBase, [currentCommit, requiredCommit])
-            let mergeBase = strutils.strip(cc)
-            if status == 0 and (mergeBase == currentCommit or mergeBase == requiredCommit):
-              # conflict resolution: pick the later commit:
-              if mergeBase == currentCommit:
-                checkoutGitCommit(c, w.name, requiredCommit)
-            else:
-              checkoutGitCommit(c, w.name, requiredCommit)
-              when false:
-                warn c, w.name, "do not know which commit is more recent:",
-                  currentCommit, "(current) or", w.commit, " =", requiredCommit, "(required)"
-
-proc findNimbleFile(c: AtlasContext; dep: Dependency): string =
-  when MockupRun:
-    result = TestsDir / dep.name.string & ".nimble"
-    doAssert fileExists(result), "file does not exist " & result
-  else:
-    result = c.workspace / dep.name.string / (dep.name.string & ".nimble")
-    if not fileExists(result):
-      result = ""
-      for x in walkFiles(c.workspace / dep.name.string / "*.nimble"):
-        if result.len == 0:
-          result = x
-        else:
-          # ambiguous .nimble file
-          return ""
-
-proc addUniqueDep(c: var AtlasContext; work: var seq[Dependency];
-                  tokens: seq[string]) =
-  let oldErrors = c.errors
-  let url = toUrl(c, tokens[0])
-  if oldErrors != c.errors:
-    warn c, toName(tokens[0]), "cannot resolve package name"
-  elif not c.processed.containsOrIncl(url / tokens[2]):
-    work.add Dependency(name: toName(tokens[0]), url: url, commit: tokens[2],
-                        rel: toDepRelation(tokens[1]))
-
-template toDestDir(p: PackageName): string = p.string
-
-proc collectDeps(c: var AtlasContext; work: var seq[Dependency];
-                 dep: Dependency; nimbleFile: string): string =
-  # If there is a .nimble file, return the dependency path & srcDir
-  # else return "".
-  assert nimbleFile != ""
-  let nimbleInfo = extractRequiresInfo(c, nimbleFile)
-  for r in nimbleInfo.requires:
-    var tokens: seq[string] = @[]
-    for token in tokenizeRequires(r):
-      tokens.add token
-    if tokens.len == 1:
-      # nimx uses dependencies like 'requires "sdl2"'.
-      # Via this hack we map them to the first tagged release.
-      # (See the `isStrictlySmallerThan` logic.)
-      tokens.add "<"
-      tokens.add InvalidCommit
-    elif tokens.len == 2 and tokens[1].startsWith("#"):
-      # Dependencies can also look like 'requires "sdl2#head"
-      var commit = tokens[1][1 .. ^1]
-      tokens[1] = "=="
-      tokens.add commit
-
-    if tokens.len >= 3 and cmpIgnoreCase(tokens[0], "nim") != 0:
-      c.addUniqueDep work, tokens
-  result = toDestDir(dep.name) / nimbleInfo.srcDir
-
-proc collectNewDeps(c: var AtlasContext; work: var seq[Dependency];
-                    dep: Dependency; result: var seq[string];
-                    isMainProject: bool) =
-  let nimbleFile = findNimbleFile(c, dep)
-  if nimbleFile != "":
-    let x = collectDeps(c, work, dep, nimbleFile)
-    result.add x
-  else:
-    result.add toDestDir(dep.name)
-
-proc cloneLoop(c: var AtlasContext; work: var seq[Dependency]): seq[string] =
-  result = @[]
-  var i = 0
-  while i < work.len:
-    let w = work[i]
-    let destDir = toDestDir(w.name)
-    let oldErrors = c.errors
-
-    if not dirExists(c.workspace / destDir):
-      withDir c, c.workspace:
-        let err = cloneUrl(c, w.url, destDir, false)
-        if err != "":
-          error c, w.name, err
-    if oldErrors == c.errors:
-      if not c.keepCommits: checkoutCommit(c, w)
-      # even if the checkout fails, we can make use of the somewhat
-      # outdated .nimble file to clone more of the most likely still relevant
-      # dependencies:
-      collectNewDeps(c, work, w, result, i == 0)
-    inc i
-
-proc clone(c: var AtlasContext; start: string): seq[string] =
-  # non-recursive clone.
-  let url = toUrl(c, start)
-  var work = @[Dependency(name: toName(start), url: url, commit: "")]
-
-  if url == "":
-    error c, toName(start), "cannot resolve package name"
-    return
-
-  c.projectDir = c.workspace / toDestDir(work[0].name)
-  result = cloneLoop(c, work)
-
-const
-  configPatternBegin = "############# begin Atlas config section ##########\n"
-  configPatternEnd =   "############# end Atlas config section   ##########\n"
-
-proc patchNimCfg(c: var AtlasContext; deps: seq[string]; cfgPath: string) =
-  var paths = "--noNimblePath\n"
-  for d in deps:
-    let pkgname = toDestDir d.PackageName
-    let x = relativePath(c.workspace / pkgname, cfgPath, '/')
-    paths.add "--path:\"" & x & "\"\n"
-  var cfgContent = configPatternBegin & paths & configPatternEnd
-
-  when MockupRun:
-    assert readFile(TestsDir / "nim.cfg") == cfgContent
-    c.mockupSuccess = true
-  else:
-    let cfg = cfgPath / "nim.cfg"
-    if cfgPath.len > 0 and not dirExists(cfgPath):
-      error(c, c.projectDir.PackageName, "could not write the nim.cfg")
-    elif not fileExists(cfg):
-      writeFile(cfg, cfgContent)
-    else:
-      let content = readFile(cfg)
-      let start = content.find(configPatternBegin)
-      if start >= 0:
-        cfgContent = content.substr(0, start-1) & cfgContent
-        let theEnd = content.find(configPatternEnd, start)
-        if theEnd >= 0:
-          cfgContent.add content.substr(theEnd+len(configPatternEnd))
-      else:
-        cfgContent = content & "\n" & cfgContent
-      if cfgContent != content:
-        # do not touch the file if nothing changed
-        # (preserves the file date information):
-        writeFile(cfg, cfgContent)
-
-proc error*(msg: string) =
-  when defined(debug):
-    writeStackTrace()
-  quit "[Error] " & msg
-
-proc findSrcDir(c: var AtlasContext): string =
-  for nimbleFile in walkPattern("*.nimble"):
-    let nimbleInfo = extractRequiresInfo(c, nimbleFile)
-    return nimbleInfo.srcDir
-  return ""
-
-proc installDependencies(c: var AtlasContext; nimbleFile: string) =
-  # 1. find .nimble file in CWD
-  # 2. install deps from .nimble
-  var work: seq[Dependency] = @[]
-  let (path, pkgname, _) = splitFile(nimbleFile)
-  let dep = Dependency(name: toName(pkgname), url: "", commit: "")
-  discard collectDeps(c, work, dep, nimbleFile)
-  let paths = cloneLoop(c, work)
-  patchNimCfg(c, paths, if c.cfgHere: getCurrentDir() else: findSrcDir(c))
-
-proc main =
-  var action = ""
-  var args: seq[string] = @[]
-  template singleArg() =
-    if args.len != 1:
-      error action & " command takes a single package name"
-
-  template noArgs() =
-    if args.len != 0:
-      error action & " command takes no arguments"
-
-  var c = AtlasContext(
-    projectDir: getCurrentDir(),
-    workspace: "")
-
-  for kind, key, val in getopt():
-    case kind
-    of cmdArgument:
-      if action.len == 0:
-        action = key.normalize
-      else:
-        args.add key
-    of cmdLongOption, cmdShortOption:
-      case normalize(key)
-      of "help", "h": writeHelp()
-      of "version", "v": writeVersion()
-      of "keepcommits": c.keepCommits = true
-      of "workspace":
-        if val.len > 0:
-          c.workspace = val
-          createDir(val)
-        else:
-          writeHelp()
-      of "cfghere": c.cfgHere = true
-      else: writeHelp()
-    of cmdEnd: assert false, "cannot happen"
-
-  if c.workspace.len > 0:
-    if not dirExists(c.workspace): error "Workspace directory '" & c.workspace & "' not found."
-  else:
-    c.workspace = getCurrentDir()
-    while c.workspace.len > 0 and dirExists(c.workspace / ".git"):
-      c.workspace = c.workspace.parentDir()
-  echo "Using workspace ", c.workspace
-
-  case action
-  of "":
-    error "No action."
-  of "clone":
-    singleArg()
-    let deps = clone(c, args[0])
-    patchNimCfg c, deps, if c.cfgHere: getCurrentDir() else: findSrcDir(c)
-    when MockupRun:
-      if not c.mockupSuccess:
-        error "There were problems."
-    else:
-      if c.errors > 0:
-        error "There were problems."
-  of "install":
-    if args.len > 1:
-      error "install command takes a single argument"
-    var nimbleFile = ""
-    if args.len == 1:
-      nimbleFile = args[0]
-    else:
-      for x in walkPattern("*.nimble"):
-        nimbleFile = x
-        break
-    if nimbleFile.len == 0:
-      error "could not find a .nimble file"
-    installDependencies(c, nimbleFile)
-  of "refresh":
-    noArgs()
-    updatePackages(c)
-  of "search", "list":
-    updatePackages(c)
-    search getPackages(c.workspace), args
-  of "extract":
-    singleArg()
-    if fileExists(args[0]):
-      echo toJson(extractRequiresInfo(args[0]))
-    else:
-      error "File does not exist: " & args[0]
-  else:
-    error "Invalid action: " & action
-
-when isMainModule:
-  main()
-
-when false:
-  # some testing code for the `patchNimCfg` logic:
-  var c = AtlasContext(
-    projectDir: getCurrentDir(),
-    workspace: getCurrentDir().parentDir)
-
-  patchNimCfg(c, @[PackageName"abc", PackageName"xyz"])
-
-when false:
-  assert sameVersionAs("v0.2.0", "0.2.0")
-  assert sameVersionAs("v1", "1")
-
-  assert sameVersionAs("1.90", "1.90")
-
-  assert sameVersionAs("v1.2.3-zuzu", "1.2.3")
-  assert sameVersionAs("foo-1.2.3.4", "1.2.3.4")
-
-  assert not sameVersionAs("foo-1.2.3.4", "1.2.3")
-  assert not sameVersionAs("foo", "1.2.3")
-  assert not sameVersionAs("", "1.2.3")
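A reading aid, not part of the patch: the "pick the later commit" step in the deleted `checkoutCommit` above leans on `git merge-base current required` printing the common ancestor of the two commits. If that ancestor is the current commit, the required commit is a descendant and gets checked out; if it is the required commit, the working copy is already ahead and is left as is. A simplified standalone sketch of the first case (hypothetical helper, assumed to run inside the dependency's checkout):

```
import std/[osproc, strutils]

proc shouldCheckout(currentCommit, requiredCommit: string): bool =
  ## True when `requiredCommit` is a descendant of `currentCommit`,
  ## i.e. their merge base is the current commit itself.
  let (outp, status) = execCmdEx("git merge-base " &
                                 currentCommit & " " & requiredCommit)
  result = status == 0 and outp.strip() == currentCommit and
           currentCommit != requiredCommit
```
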
diff --git a/tools/atlas/osutils.nim b/tools/atlas/osutils.nim
deleted file mode 100644
index 6134830b5..000000000
--- a/tools/atlas/osutils.nim
+++ /dev/null
@@ -1,51 +0,0 @@
-## OS utilities like 'withDir'.
-## (c) 2021 Andreas Rumpf
-
-import os, strutils, osproc
-
-proc isUrl*(x: string): bool =
-  x.startsWith("git://") or x.startsWith("https://") or x.startsWith("http://")
-
-proc cloneUrl*(url, dest: string; cloneUsingHttps: bool): string =
-  ## Returns an error message on error or else "".
-  result = ""
-  var modUrl =
-    if url.startsWith("git://") and cloneUsingHttps:
-      "https://" & url[6 .. ^1]
-    else: url
-
-  # github + https + trailing url slash causes a
-  # checkout/ls-remote to fail with Repository not found
-  var isGithub = false
-  if modUrl.contains("github.com") and modUrl.endsWith("/"):
-    modUrl = modUrl[0 .. ^2]
-    isGithub = true
-
-  let (_, exitCode) = execCmdEx("git ls-remote --quiet --tags " & modUrl)
-  var xcode = exitCode
-  if isGithub and exitCode != QuitSuccess:
-    # retry multiple times to avoid annoying github timeouts:
-    for i in 0..4:
-      os.sleep(4000)
-      xcode = execCmdEx("git ls-remote --quiet --tags " & modUrl)[1]
-      if xcode == QuitSuccess: break
-
-  if xcode == QuitSuccess:
-    # retry multiple times to avoid annoying github timeouts:
-    let cmd = "git clone " & modUrl & " " & dest
-    for i in 0..4:
-      if execShellCmd(cmd) == 0: return ""
-      os.sleep(4000)
-    result = "exernal program failed: " & cmd
-  elif not isGithub:
-    let (_, exitCode) = execCmdEx("hg identify " & modUrl)
-    if exitCode == QuitSuccess:
-      let cmd = "hg clone " & modUrl & " " & dest
-      for i in 0..4:
-        if execShellCmd(cmd) == 0: return ""
-        os.sleep(4000)
-      result = "exernal program failed: " & cmd
-    else:
-      result = "Unable to identify url: " & modUrl
-  else:
-    result = "Unable to identify url: " & modUrl
diff --git a/tools/atlas/packagesjson.nim b/tools/atlas/packagesjson.nim
deleted file mode 100644
index 0b8599769..000000000
--- a/tools/atlas/packagesjson.nim
+++ /dev/null
@@ -1,114 +0,0 @@
-
-import std / [json, os, sets, strutils]
-
-type
-  Package* = ref object
-    # Required fields in a package.
-    name*: string
-    url*: string # Download location.
-    license*: string
-    downloadMethod*: string
-    description*: string
-    tags*: seq[string] # \
-    # From here on, optional fields set to the empty string if not available.
-    version*: string
-    dvcsTag*: string
-    web*: string # Info url for humans.
-
-proc optionalField(obj: JsonNode, name: string, default = ""): string =
-  if hasKey(obj, name) and obj[name].kind == JString:
-    result = obj[name].str
-  else:
-    result = default
-
-proc requiredField(obj: JsonNode, name: string): string =
-  result = optionalField(obj, name, "")
-
-proc fromJson*(obj: JSonNode): Package =
-  result = Package()
-  result.name = obj.requiredField("name")
-  if result.name.len == 0: return nil
-  result.version = obj.optionalField("version")
-  result.url = obj.requiredField("url")
-  if result.url.len == 0: return nil
-  result.downloadMethod = obj.requiredField("method")
-  if result.downloadMethod.len == 0: return nil
-  result.dvcsTag = obj.optionalField("dvcs-tag")
-  result.license = obj.optionalField("license")
-  result.tags = @[]
-  for t in obj["tags"]:
-    result.tags.add(t.str)
-  result.description = obj.requiredField("description")
-  result.web = obj.optionalField("web")
-
-const PackagesDir* = "packages"
-
-proc getPackages*(workspaceDir: string): seq[Package] =
-  result = @[]
-  var uniqueNames = initHashSet[string]()
-  var jsonFiles = 0
-  for kind, path in walkDir(workspaceDir / PackagesDir):
-    if kind == pcFile and path.endsWith(".json"):
-      inc jsonFiles
-      let packages = json.parseFile(path)
-      for p in packages:
-        let pkg = p.fromJson()
-        if pkg != nil and not uniqueNames.containsOrIncl(pkg.name):
-          result.add(pkg)
-
-proc `$`*(pkg: Package): string =
-  result = pkg.name & ":\n"
-  result &= "  url:         " & pkg.url & " (" & pkg.downloadMethod & ")\n"
-  result &= "  tags:        " & pkg.tags.join(", ") & "\n"
-  result &= "  description: " & pkg.description & "\n"
-  result &= "  license:     " & pkg.license & "\n"
-  if pkg.web.len > 0:
-    result &= "  website:     " & pkg.web & "\n"
-
-proc search*(pkgList: seq[Package]; terms: seq[string]) =
-  var found = false
-  template onFound =
-    echo pkg
-    found = true
-    break forPackage
-
-  for pkg in pkgList:
-    if terms.len > 0:
-      block forPackage:
-        for term in terms:
-          let word = term.toLower
-          # Search by name.
-          if word in pkg.name.toLower:
-            onFound()
-          # Search by tag.
-          for tag in pkg.tags:
-            if word in tag.toLower:
-              onFound()
-    else:
-      echo(pkg)
-
-  if not found and terms.len > 0:
-    echo("No package found.")
-
-type PkgCandidates* = array[3, seq[Package]]
-
-proc determineCandidates*(pkgList: seq[Package];
-                         terms: seq[string]): PkgCandidates =
-  result[0] = @[]
-  result[1] = @[]
-  result[2] = @[]
-  for pkg in pkgList:
-    block termLoop:
-      for term in terms:
-        let word = term.toLower
-        if word == pkg.name.toLower:
-          result[0].add pkg
-          break termLoop
-        elif word in pkg.name.toLower:
-          result[1].add pkg
-          break termLoop
-        else:
-          for tag in pkg.tags:
-            if word in tag.toLower:
-              result[2].add pkg
-              break termLoop
diff --git a/tools/atlas/parse_requires.nim b/tools/atlas/parse_requires.nim
index 7e26a1656..66879d04f 100644
--- a/tools/atlas/parse_requires.nim
+++ b/tools/atlas/parse_requires.nim
@@ -2,7 +2,7 @@
 ## (c) 2021 Andreas Rumpf
 
 import std / strutils
-import ".." / compiler / [ast, idents, msgs, syntaxes, options, pathutils]
+import ".." / ".." / compiler / [ast, idents, msgs, syntaxes, options, pathutils]
 
 type
   NimbleFileInfo* = object
diff --git a/tools/atlas/testdata.nim b/tools/atlas/testdata.nim
deleted file mode 100644
index aefaeacd2..000000000
--- a/tools/atlas/testdata.nim
+++ /dev/null
@@ -1,63 +0,0 @@
-
-type
-  PerDirData = object
-    dirname: string
-    cmd: Command
-    exitCode: int
-    output: string
-
-template toData(a, b, c, d): untyped =
-  PerDirData(dirname: a, cmd: b, exitCode: c, output: d)
-
-const
-  TestLog = [
-    toData("balls", GitPull, 0, "Already up to date.\n"),
-    toData("grok", GitDiff, 0, ""),
-    toData("grok", GitTags, 0, "2ca193c31fa2377c1e991a080d60ca3215ff6cf0 refs/tags/0.0.1\n48007554b21ba2f65c726ae2fdda88d621865b4a refs/tags/0.0.2\n7092a0286421c7818cd335cca9ebc72d03d866c2 refs/tags/0.0.3\n62707b8ac684efac35d301dbde57dc750880268e refs/tags/0.0.4\n876f2504e0c2f785ffd2cf65a78e2aea474fa8aa refs/tags/0.0.5\nb7eb1f2501aa2382cb3a38353664a13af62a9888 refs/tags/0.0.6\nf5d818bfd6038884b3d8b531c58484ded20a58a4 refs/tags/0.1.0\n961eaddea49c3144d130d105195583d3f11fb6c6 refs/tags/0.2.0\n15ab8ed8d4f896232a976a9008548bd53af72a66 refs/tags/0.2.1\n426a7d7d4603f77ced658e73ad7f3f582413f6cd refs/tags/0.3.0\n83cf7a39b2fe897786fb0fe01a7a5933c3add286 refs/tags/0.3.1\n8d2e3c900edbc95fa0c036fd76f8e4f814aef2c1 refs/tags/0.3.2\n48b43372f49a3bb4dc0969d82a0fca183fb94662 refs/tags/0.3.3\n9ca947a3009ea6ba17814b20eb953272064eb2e6 refs/tags/0.4.0\n1b5643d04fba6d996a16d1ffc13d034a40003f8f refs/tags/0.5.0\n486b0eb580b1c465453d264ac758cc490c19c33e refs/tags/0.5.1\naedb0d9497390e20b9d2541cef2bb05a5cda7a71 refs/tags/0.5.2\n"),
-    toData("grok", GitCurrentCommit, 0, "349c15fd1e03f1fcdd81a1edefba3fa6116ab911\n"),
-    toData("grok", GitMergeBase, 0, "349c15fd1e03f1fcdd81a1edefba3fa6116ab911\n1b5643d04fba6d996a16d1ffc13d034a40003f8f\n349c15fd1e03f1fcdd81a1edefba3fa6116ab911\n"),
-    toData("grok", GitCheckout, 0, "1b5643d04fba6d996a16d1ffc13d034a40003f8f"), # watch out!
-
-    toData("ups", GitDiff, 0, ""),
-    toData("ups", GitTags, 0, "4008f9339cd22b30e180bc87a6cca7270fd28ac1 refs/tags/0.0.2\n19bc490c22b4f5b0628c31cdedead1375b279356 refs/tags/0.0.3\nff34602aaea824cb46d6588cd5fe1178132e9702 refs/tags/0.0.4\n09de599138f20b745133b6e4fe563e204415a7e8 refs/tags/0.0.5\n85fee3b74798311108a105635df31f892150f5d0 refs/tags/0.0.6\nfd303913b22b121dc42f332109e9c44950b9acd4 refs/tags/0.0.7\n"),
-    toData("ups", GitCurrentCommit, 0, "74c31af8030112dac758440aa51ef175992f71f3\n"),
-    toData("ups", GitMergeBase, 0, "74c31af8030112dac758440aa51ef175992f71f3\n4008f9339cd22b30e180bc87a6cca7270fd28ac1\n74c31af8030112dac758440aa51ef175992f71f3\n"),
-    toData("ups", GitCheckout, 0, "4008f9339cd22b30e180bc87a6cca7270fd28ac1"),
-
-    toData("sync", GitDiff, 0, ""),
-    toData("sync", GitRevParse, 0, "810bd2d75e9f6e182534ae2488670b51a9f13fc3\n"),
-    toData("sync", GitCurrentCommit, 0, "de5c7337ebc22422190e8aeca37d05651735f440\n"),
-    toData("sync", GitMergeBase, 0, "de5c7337ebc22422190e8aeca37d05651735f440\n810bd2d75e9f6e182534ae2488670b51a9f13fc3\n810bd2d75e9f6e182534ae2488670b51a9f13fc3\n"),
-
-    toData("npeg", GitDiff, 0, ""),
-    toData("npeg", GitTags, 0, "8df2f0c9391995fd086b8aab00e8ab7aded1e8f0 refs/tags/0.1.0\n4c959a72db5283b55eeef491076eefb5e02316f1 refs/tags/0.10.0\n802f47c0f7f4318a4f0858ba5a6a6ed2333bde71 refs/tags/0.11.0\n82c8d92837108dce225358ace2c416bf9a3f30ce refs/tags/0.12.0\n87d2f2c4f6ef7da350d45beb5a336611bde7f518 refs/tags/0.13.0\n39964f0d220bfaade47a568bf03c1cf28aa2bc37 refs/tags/0.14.0\nbe9f03f92304cbeab70572944a8563db9b23b2fb refs/tags/0.14.1\na933fb9832566fc95273e417597bfb4faf564ca6 refs/tags/0.15.0\n6aad2e438c52ff0636c7bfb64338e444ac3e83ba refs/tags/0.16.0\nf4ddffb5848c42c6151743dd9c7eddcaaabc56cc refs/tags/0.17.0\n30b446b39442cdbc53a97018ab8a54149aa7c3b7 refs/tags/0.17.1\n1a9d36aa3b34a6169d4530463f1c17a3fe1e075e refs/tags/0.18.0\ndd34f903a9a63b876cb2db19b7a4ce0bcc252134 refs/tags/0.19.0\nd93d49c81fc8722d7929ac463b435c0f2e10c53b refs/tags/0.2.0\neeae7746c9b1118bcf27744ab2aee26969051256 refs/tags/0.20.0\n8c3471a548129f3bf62df15cd0fd8cca1787d852 refs/tags/0.21.0\nc0e873a17bc713c80e74fec3c30cb62dcd5d194a refs/tags/0.21.1\nbae84c47a1bb259b209b6f6be1582327b784539d refs/tags/0.21.2\nbfcb4bcae76a917c3c88736ca773e4cb67dbb2d8 refs/tags/0.21.3\n0eabb7c462d30932049f0b7e6a030c1562cf9fee refs/tags/0.22.0\n2e75367095f54d4351005078bad98041a55b14c1 refs/tags/0.22.1\n814ea235dd398108d7b18f966694c3d951575701 refs/tags/0.22.2\na812064587d983c129737f8500bf74990e6b8dab refs/tags/0.23.0\nbd969ad3745db0d66022564cac76cf9424651104 refs/tags/0.23.1\na037c646a47623b92718efadc2bb74d03664b360 refs/tags/0.23.2\n078475ccceeaca0fac947492acdd24514da8d863 refs/tags/0.24.0\ne7bd87dc992512fd5825a557a56907647e03c979 refs/tags/0.24.1\n45ea601e1c7f64fb857bc99df984b86673621d2c refs/tags/0.3.0\n1ea9868a3fee3aa487ab7ec9129208a4dd483d0d refs/tags/0.4.0\n39afdb5733d3245386d29d08c5ff61c89268f499 refs/tags/0.5.0\n458c7b5910fcb157af3fc51bc3b3e663fdb3ed4a refs/tags/0.6.0\n06c38bd8563d822455bc237c2a98c153d938ed1b refs/tags/0.7.0\nf446b6056eef6d8dc9d8b47a79aca93d17dc8230 refs/tags/0.8.0\nbb25a195133f9f7af06386d0809793923cc5e8ab refs/tags/0.9.0\n"),
-    toData("npeg", GitCurrentCommit, 0, "5d80f93aa720898936668b3bc47d0fff101ec414\n"),
-    toData("npeg", GitMergeBase, 0, "5d80f93aa720898936668b3bc47d0fff101ec414\na037c646a47623b92718efadc2bb74d03664b360\na037c646a47623b92718efadc2bb74d03664b360\n"),
-
-    toData("testes", GitDiff, 0, ""),
-    toData("testes", GitTags, 0, "3ce9b2968b5f644755a0ced1baa3eece88c2f12e refs/tags/0.1.0\nf73af8318b54737678fab8b54bdcd8a451015e0d refs/tags/0.1.1\nd21d84d37b161a123a43318bae353108755916de refs/tags/0.1.2\n5c36b6095353ed03b08ac939d00aff2d73f79a35 refs/tags/0.1.3\na1220d11237ee8f135f772ff9731c11b2d91ba31 refs/tags/0.1.4\n574f741b90d04a7ce8c9b990e6077708d7ad076e refs/tags/0.1.5\nced0a9e58234b680def6931578e09165a32e6291 refs/tags/0.1.6\nbb248952e8742a6011eb1a45a9d2059aeb0341d7 refs/tags/0.1.7\nabb7d7c552da0a8e0ddc586c15ccf7e74b0d068b refs/tags/0.10.0\n6e42a768a90d6442196b344bcdcb6f834b76e7b7 refs/tags/0.2.0\n9d136c3a0851ca2c021f5fb4f7b63f0a0ef77232 refs/tags/0.2.1\ndcb282b2da863fd2939e1969cec7a99788feb456 refs/tags/0.2.2\nf708a632afaa40a322a1a61c1c13722edac8e8c5 refs/tags/0.3.0\n3213f59e3f9ba052452c59f01d1418360d856af6 refs/tags/0.3.1\nf7bb1743dffd327958dfcebae4cfb6f61cc1cb8c refs/tags/0.3.2\n6b64569ebecad6bc60cc8697713701e7659204f4 refs/tags/0.3.3\nb51c25a4367bd17f419f78cb5a27f319e9d820f5 refs/tags/0.3.4\nb265612710cbd5ddb1b173c94ece8ec5c7ceccac refs/tags/0.3.5\ne404bcfe42e92d7509717a2dfa115cacb4964c5d refs/tags/0.3.6\n5e4d0d5b7e7f314dde701c546c4365c59782d3dc refs/tags/0.3.7\ne13f91c9c913d2b81c59adeaad687efa2b35293a refs/tags/0.3.8\n17599625f09af0ae4b525e63ab726a3002540702 refs/tags/0.3.9\n13e907f70571dd146d8dc29ddec4599b40ba4e85 refs/tags/0.4.0\n155a74cf676495df1e0674dd07b5e4a0291a9a4a refs/tags/0.4.1\nf37abccdc148cb02ca637a6f0bc8821491cce358 refs/tags/0.4.2\n0250d29ebdd02f28f9020445adb5a4e51fd1902c refs/tags/0.5.0\n2fb87db6d9f34109a70205876030c53f815739b7 refs/tags/0.5.1\n629d17ba8d6a1a4eca8145eb089ed5bca4473dfc refs/tags/0.6.0\ne926130f5f1b7903f68be49cc1563225bd9d948d refs/tags/0.7.0\n7365303897e6185796c274425c079916047e3f14 refs/tags/0.7.1\na735c4adabeba637409f41c4325dd8fc5fb91e2d refs/tags/0.7.10\nfe023fd27404889c5122f902456cbba14b767405 refs/tags/0.7.11\n4430e72972c77a5e9c1555d59bba11d840682691 refs/tags/0.7.12\nf0e53eb490a9558c7f594d2e095b70665e36ca88 refs/tags/0.7.13\nf6520e25e7c329c2957cda447f149fc6a930db0d refs/tags/0.7.2\nd509762f7191757c240d3c79c9ecda53f8c0cfe3 refs/tags/0.7.3\nc02e7a783d1c42fd1f91bca7142f7c3733950c05 refs/tags/0.7.4\n8c8a9e496e9b86ba7602709438980ca31e6989d9 refs/tags/0.7.5\n29839c18b4ac83c0111a178322b57ebb8a8d402c refs/tags/0.7.6\n3b62973cf74fafd8ea906644d89ac34d29a8a6cf refs/tags/0.7.7\ne67ff99dc43c391e89a37f97a9d298c3428bbde2 refs/tags/0.7.8\n4b72ecda0d40ed8e5ab8ad4095a0691d30ec6cd0 refs/tags/0.7.9\n2512b8cc3d7f001d277e89978da2049a5feee5c4 refs/tags/0.8.0\n86c47029690bd2731d204245f3f54462227bba0d refs/tags/0.9.0\n9a7f94f78588e9b5ba7ca077e1f7eae0607c6cf6 refs/tags/0.9.1\n08c915dc016d16c1dfa9a77d0b045ec29c9f2074 refs/tags/0.9.2\n3fb658b1ce1e1efa37d6f9f14322bdac8def02a5 refs/tags/0.9.3\n738fda0add962379ffe6aa6ca5f01a6943a98a2e refs/tags/0.9.4\n48d821add361f7ad768ecb35a0b19c38f90c919e refs/tags/0.9.5\nff9ae890f597dac301b2ac6e6805eb9ac5afd49a refs/tags/0.9.6\n483c78f06e60b0ec5e79fc3476df075ee7286890 refs/tags/0.9.7\n416eec87a5ae39a1a6035552e9e9a47d76b13026 refs/tags/1.0.0\na935cfe9445cc5218fbdd7e0afb35aa1587fff61 refs/tags/1.0.1\n4b83863a9181f054bb695b11b5d663406dfd85d2 refs/tags/1.0.2\n295145fddaa4fe29c1e71a5044d968a84f9dbf69 refs/tags/1.1.0\n8f74ea4e5718436c47305b4488842e6458a13dac refs/tags/1.1.1\n4135bb291e53d615a976e997c44fb2bd9e1ad343 refs/tags/1.1.10\n8c09dbcd16612f5989065db02ea2e7a752dd2656 refs/tags/1.1.11\naedfebdb6c016431d84b0c07cf181b957a900640 refs/tags/1.1.12\n2c2e958366ef6998115740bdf110588d730e5738 
refs/tags/1.1.2\nbecc77258321e6ec40d89efdddf37bafd0d07fc3 refs/tags/1.1.3\ne070d7c9853bf94c35b81cf0c0a8980c2449bb22 refs/tags/1.1.4\n12c986cbbf65e8571a486e9230808bf887e5f04f refs/tags/1.1.5\n63df8986f5b56913b02d26954fa033eeaf43714c refs/tags/1.1.6\n38e02c9c6bd728b043036fe0d1894d774cab3108 refs/tags/1.1.7\n3c3879fff16450d28ade79a6b08982bf5cefc061 refs/tags/1.1.8\ne32b811b3b2e70a1d189d7a663bc2583e9c18f96 refs/tags/1.1.9\n0c1b4277c08197ce7e7e0aa2bad91d909fcd96ac refs/tags/2.0.0\n"),
-    toData("testes", GitCurrentCommit, 0, "d9db2ad09aa38fc26625341e1b666602959e144f\n"),
-    toData("testes", GitMergeBase, 0, "d9db2ad09aa38fc26625341e1b666602959e144f\n416eec87a5ae39a1a6035552e9e9a47d76b13026\nd9db2ad09aa38fc26625341e1b666602959e144f\n"),
-    toData("testes", GitCheckout, 0, "416eec87a5ae39a1a6035552e9e9a47d76b13026"),
-
-    toData("grok", GitDiff, 0, ""),
-    toData("grok", GitTags, 0, "2ca193c31fa2377c1e991a080d60ca3215ff6cf0 refs/tags/0.0.1\n48007554b21ba2f65c726ae2fdda88d621865b4a refs/tags/0.0.2\n7092a0286421c7818cd335cca9ebc72d03d866c2 refs/tags/0.0.3\n62707b8ac684efac35d301dbde57dc750880268e refs/tags/0.0.4\n876f2504e0c2f785ffd2cf65a78e2aea474fa8aa refs/tags/0.0.5\nb7eb1f2501aa2382cb3a38353664a13af62a9888 refs/tags/0.0.6\nf5d818bfd6038884b3d8b531c58484ded20a58a4 refs/tags/0.1.0\n961eaddea49c3144d130d105195583d3f11fb6c6 refs/tags/0.2.0\n15ab8ed8d4f896232a976a9008548bd53af72a66 refs/tags/0.2.1\n426a7d7d4603f77ced658e73ad7f3f582413f6cd refs/tags/0.3.0\n83cf7a39b2fe897786fb0fe01a7a5933c3add286 refs/tags/0.3.1\n8d2e3c900edbc95fa0c036fd76f8e4f814aef2c1 refs/tags/0.3.2\n48b43372f49a3bb4dc0969d82a0fca183fb94662 refs/tags/0.3.3\n9ca947a3009ea6ba17814b20eb953272064eb2e6 refs/tags/0.4.0\n1b5643d04fba6d996a16d1ffc13d034a40003f8f refs/tags/0.5.0\n486b0eb580b1c465453d264ac758cc490c19c33e refs/tags/0.5.1\naedb0d9497390e20b9d2541cef2bb05a5cda7a71 refs/tags/0.5.2\n"),
-    toData("grok", GitCurrentCommit, 0, "4e6526a91a23eaec778184e16ce9a34d25d48bdc\n"),
-    toData("grok", GitMergeBase, 0, "4e6526a91a23eaec778184e16ce9a34d25d48bdc\n62707b8ac684efac35d301dbde57dc750880268e\n349c15fd1e03f1fcdd81a1edefba3fa6116ab911\n"),
-    toData("grok", GitCheckout, 0, "62707b8ac684efac35d301dbde57dc750880268e"),
-
-    toData("nim-bytes2human", GitDiff, 0, ""),
-    toData("nim-bytes2human", GitTags, 0, ""),
-    toData("nim-bytes2human", GitCurrentcommit, 0, "ec2c1a758cabdd4751a06c8ebf2b923f19e32731\n")
-  ]
-
-#[
-Current directory is now E:\atlastest\nim-bytes2human
-cmd git diff args [] --> ("", 0)
-cmd git show-ref --tags args [] --> ("", 1)
-cmd git log -n 1 --format=%H args [] --> (, 0)
-[Warning] (nim-bytes2human) package has no tagged releases
-nimble E:\atlastest\nim-bytes2human\bytes2human.nimble info (requires: @["nim >= 1.0.0"], srcDir: "src", tasks: @[])
-[Error] There were problems.
-Error: execution of an external program failed: 'E:\nim\tools\atlas\atlas.exe clone https://github.com/disruptek/balls'
-]#
diff --git a/tools/atlas/tests/balls.nimble b/tools/atlas/tests/balls.nimble
deleted file mode 100644
index 143e757e9..000000000
--- a/tools/atlas/tests/balls.nimble
+++ /dev/null
@@ -1,32 +0,0 @@
-version = "3.4.1"
-author = "disruptek"
-description = "a unittest framework with balls 🔴🟡🟢"
-license = "MIT"
-
-# requires newTreeFrom
-requires "https://github.com/disruptek/grok >= 0.5.0 & < 1.0.0"
-requires "https://github.com/disruptek/ups < 1.0.0"
-requires "https://github.com/planetis-m/sync#810bd2d"
-#requires "https://github.com/c-blake/cligen < 2.0.0"
-
-bin = @["balls"]            # build the binary for basic test running
-installExt = @["nim"]       # we need to install balls.nim also
-skipDirs = @["tests"]       # so stupid...  who doesn't want tests?
-#installFiles = @["balls.nim"] # https://github.com/nim-lang/Nim/issues/16661
-
-task test, "run tests for ci":
-  when defined(windows):
-    exec "balls.cmd"
-  else:
-    exec "balls"
-
-task demo, "produce a demo":
-  exec "nim c --define:release balls.nim"
-  when (NimMajor, NimMinor) != (1, 0):
-    echo "due to nim bug #16307, use nim-1.0"
-    quit 1
-  exec """demo docs/demo.svg "nim c --out=\$1 examples/fails.nim""""
-  exec """demo docs/clean.svg "nim c --define:danger -f --out=\$1 tests/test.nim""""
-  exec "nim c --define:release --define:ballsDry balls.nim"
-  exec """demo docs/runner.svg "balls""""
-
diff --git a/tools/atlas/tests/grok.nimble b/tools/atlas/tests/grok.nimble
deleted file mode 100644
index 1b6d77c08..000000000
--- a/tools/atlas/tests/grok.nimble
+++ /dev/null
@@ -1,5 +0,0 @@
-version = "0.0.4"
-author = "disruptek"
-description = "don't read too much into it"
-license = "MIT"
-requires "nim >= 1.0.0"
diff --git a/tools/atlas/tests/nim-bytes2human.nimble b/tools/atlas/tests/nim-bytes2human.nimble
deleted file mode 100644
index 9f3ae2479..000000000
--- a/tools/atlas/tests/nim-bytes2human.nimble
+++ /dev/null
@@ -1,7 +0,0 @@
-version     = "0.2.2"
-author      = "Juan Carlos"
-description = "Convert bytes to kilobytes, megabytes, gigabytes, etc."
-license     = "MIT"
-srcDir      = "src"
-
-requires "nim >= 1.0.0"  # https://github.com/juancarlospaco/nim-bytes2human/issues/2#issue-714338524
diff --git a/tools/atlas/tests/nim.cfg b/tools/atlas/tests/nim.cfg
deleted file mode 100644
index 5f568569b..000000000
--- a/tools/atlas/tests/nim.cfg
+++ /dev/null
@@ -1,11 +0,0 @@
-############# begin Atlas config section ##########
---noNimblePath
---path:"../balls"
---path:"../grok"
---path:"../ups"
---path:"../sync"
---path:"../npeg/src"
---path:"../testes"
---path:"../grok"
---path:"../nim-bytes2human/src"
-############# end Atlas config section   ##########
diff --git a/tools/atlas/tests/npeg.nimble b/tools/atlas/tests/npeg.nimble
deleted file mode 100644
index e71fc5aa5..000000000
--- a/tools/atlas/tests/npeg.nimble
+++ /dev/null
@@ -1,48 +0,0 @@
-# Package
-
-version       = "0.24.1"
-author        = "Ico Doornekamp"
-description   = "a PEG library"
-license       = "MIT"
-srcDir        = "src"
-installExt    = @["nim"]
-
-# Dependencies
-
-requires "nim >= 0.19.0"
-
-# Test
-
-task test, "Runs the test suite":
-  exec "nimble testc && nimble testcpp && nimble testarc && nimble testjs"
-
-task testc, "C tests":
-  exec "nim c -r tests/tests.nim"
-
-task testcpp, "CPP tests":
-  exec "nim cpp -r tests/tests.nim"
-
-task testjs, "JS tests":
-  exec "nim js -r tests/tests.nim"
-
-task testdanger, "Runs the test suite in danger mode":
-  exec "nim c -d:danger -r tests/tests.nim"
-
-task testwin, "Mingw tests":
-  exec "nim c -d:mingw tests/tests.nim && wine tests/tests.exe"
-
-task test32, "32 bit tests":
-  exec "nim c --cpu:i386 --passC:-m32 --passL:-m32 tests/tests.nim && tests/tests"
-
-task testall, "Test all":
-  exec "nimble test && nimble testcpp && nimble testdanger && nimble testjs && nimble testwin"
-
-when (NimMajor, NimMinor) >= (1, 1):
-  task testarc, "--gc:arc tests":
-    exec "nim c --gc:arc -r tests/tests.nim"
-else:
-  task testarc, "--gc:arc tests":
-    exec "true"
-
-task perf, "Test performance":
-  exec "nim cpp -r -d:danger tests/performance.nim"
diff --git a/tools/atlas/tests/packages/packages.json b/tools/atlas/tests/packages/packages.json
deleted file mode 100644
index d054a201b..000000000
--- a/tools/atlas/tests/packages/packages.json
+++ /dev/null
@@ -1,36 +0,0 @@
-[
-  {
-    "name": "bytes2human",
-    "url": "https://github.com/juancarlospaco/nim-bytes2human",
-    "method": "git",
-    "tags": [
-      "bytes",
-      "human",
-      "minimalism",
-      "size"
-    ],
-    "description": "Convert bytes to kilobytes, megabytes, gigabytes, etc.",
-    "license": "LGPLv3",
-    "web": "https://github.com/juancarlospaco/nim-bytes2human"
-  },
-  {
-    "name": "npeg",
-    "url": "https://github.com/zevv/npeg",
-    "method": "git",
-    "tags": [
-      "PEG",
-      "parser",
-      "parsing",
-      "regexp",
-      "regular",
-      "grammar",
-      "lexer",
-      "lexing",
-      "pattern",
-      "matching"
-    ],
-    "description": "PEG (Parsing Expression Grammars) string matching library for Nim",
-    "license": "MIT",
-    "web": "https://github.com/zevv/npeg"
-  }
-]
diff --git a/tools/atlas/tests/sync.nimble b/tools/atlas/tests/sync.nimble
deleted file mode 100644
index a07ae8925..000000000
--- a/tools/atlas/tests/sync.nimble
+++ /dev/null
@@ -1,10 +0,0 @@
-# Package
-
-version     = "1.4.0"
-author      = "Antonis Geralis"
-description = "Useful synchronization primitives."
-license     = "MIT"
-
-# Deps
-
-requires "nim >= 1.0.0"
diff --git a/tools/atlas/tests/testes.nimble b/tools/atlas/tests/testes.nimble
deleted file mode 100644
index 60fe1d508..000000000
--- a/tools/atlas/tests/testes.nimble
+++ /dev/null
@@ -1,23 +0,0 @@
-version = "1.0.0"
-author = "disruptek"
-description = "a cure for salty testes"
-license = "MIT"
-
-#requires "cligen >= 0.9.41 & <= 0.9.45"
-#requires "bump >= 1.8.18 & < 2.0.0"
-requires "https://github.com/disruptek/grok >= 0.0.4 & < 1.0.0"
-requires "https://github.com/juancarlospaco/nim-bytes2human"
-
-bin = @["testes"]           # build the binary for basic test running
-installExt = @["nim"]       # we need to install testes.nim also
-skipDirs = @["tests"]       # so stupid...  who doesn't want tests?
-
-task test, "run tests for ci":
-  exec "nim c --run testes.nim"
-
-task demo, "produce a demo":
-  when (NimMajor, NimMinor) != (1, 0):
-    echo "due to nim bug #16307, use nim-1.0"
-    quit 1
-  exec """demo docs/demo.svg "nim c --out=\$1 examples/balls.nim""""
-  exec """demo docs/clean.svg "nim c --define:danger --out=\$1 tests/testicles.nim""""
diff --git a/tools/atlas/tests/ups.nimble b/tools/atlas/tests/ups.nimble
deleted file mode 100644
index d91abbe60..000000000
--- a/tools/atlas/tests/ups.nimble
+++ /dev/null
@@ -1,13 +0,0 @@
-version = "0.0.2"
-author = "disruptek"
-description = "a package handler"
-license = "MIT"
-
-requires "npeg >= 0.23.2 & < 1.0.0"
-requires "https://github.com/disruptek/testes >= 1.0.0 & < 2.0.0"
-
-task test, "run tests":
-  when defined(windows):
-    exec "testes.cmd"
-  else:
-    exec findExe"testes"
diff --git a/tools/debug/customdebugtype.nim b/tools/debug/customdebugtype.nim
new file mode 100644
index 000000000..f48979661
--- /dev/null
+++ b/tools/debug/customdebugtype.nim
@@ -0,0 +1,72 @@
+## This is a demo file containing an example of how to
+## create custom LLDB summaries and objects with synthetic
+## children. These are implemented in Nim and called from the Python
+## nimlldb.py module.
+##
+## For summaries, prefix your proc names with "lldbDebugSummary", use
+## the `{.exportc.}` pragma, and return a string. Also, any `$` proc
+## that is available will be used for a given type.
+##
+## For creating a synthetic object (LLDB will display the children), use
+## the prefix "lldbDebugSynthetic", use the `{.exportc.}` pragma, and
+## return any Nim object, array, or sequence. Returning a Nim object
+## will display the fields and values of the object as children.
+## Returning an array or sequence will display children with the index
+## surrounded by square brackets as the key name
+##
+## You may also return a Nim table that contains the string
+## "LLDBDynamicObject" (case insensitive). This allows for dynamic
+## fields to be created at runtime instead of at compile time if you
+## return a Nim object as mentioned above. See the proc
+## `lldbDebugSyntheticDynamicFields` below for an example
+
+import intsets
+import tables
+
+type
+  CustomType* = object of RootObj # RootObj is not necessary, but can be used
+    myField*: int
+
+  DynamicFields* = object
+    customField*: string
+
+  CustomSyntheticReturn* = object
+    differentField*: float
+
+  LLDBDynamicObject = object
+    fields: TableRef[string, int]
+
+  LLDBDynamicObjectDynamicFields = object
+    fields: TableRef[string, string]
+
+proc lldbDebugSummaryCustomType*(ty: CustomType): string {.exportc.} =
+  ## Will display "CustomType(myField: <int_val>)" as a summary
+  result = "CustomType" & $ty
+
+proc lldbDebugSyntheticCustomType*(ty: CustomType): CustomSyntheticReturn {.exportc.} =
+  ## Will display differentField: <float_val> as a child of CustomType instead of
+  ## myField: <int_val>
+  result = CustomSyntheticReturn(differentField: ty.myField.float)
+
+proc lldbDebugSyntheticDynamicFields*(ty: DynamicFields): LLDBDynamicObjectDynamicFields {.exportc.} =
+  ## Returning an object that contains "LLDBDynamicObject" in the type name will expect an
+  ## object with one property that is a Nim Table/TableRef. If the key is a string,
+  ## it will appear in the debugger like an object field name. The value will be whatever you
+  ## set it to here as well.
+  let fields = {"customFieldName": ty.customField & " MORE TEXT"}.newTable()
+  return LLDBDynamicObjectDynamicFields(fields: fields)
+
+proc lldbDebugSummaryIntSet*(intset: IntSet): string {.exportc.} =
+  ## This will print the object in the LLDB summary just as Nim prints it
+  result = $intset
+
+proc lldbDebugSyntheticIntSet*(intset: IntSet): seq[int] {.exportc.} =
+  ## This will create a synthetic object to make it so that IntSet
+  ## will appear as a Nim object in the LLDB debugger window
+  ##
+  ## returning a seq here will display children like:
+  ## [0]: <child_value>
+  ##
+  result = newSeqOfCap[int](intset.len)
+  for val in intset:
+    result.add(val)
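A reading aid, not part of the patch: the header comment of the new customdebugtype.nim notes that, besides the `lldbDebugSummary*` procs, "any `$` proc that is available will be used for a given type". A hypothetical example of that route follows (type and proc names invented here; the overload may need to be exported or referenced somewhere so dead-code elimination keeps it in the binary):

```
type
  Point = object
    x, y: int

# If this `$` overload ends up in the debugged program, nimlldb.py can
# use it as the LLDB summary for `Point` without a dedicated
# lldbDebugSummaryPoint wrapper.
proc `$`*(p: Point): string =
  "Point(x: " & $p.x & ", y: " & $p.y & ")"
```
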
diff --git a/tools/nim-gdb.py b/tools/debug/nim-gdb.py
index f35b9a033..8c9854bda 100644
--- a/tools/nim-gdb.py
+++ b/tools/debug/nim-gdb.py
@@ -16,6 +16,10 @@ def printErrorOnce(id, message):
     errorSet.add(id)
     gdb.write("printErrorOnce: " + message, gdb.STDERR)
 
+def debugPrint(x):
+  gdb.write(str(x) + "\n", gdb.STDERR)
+
+NIM_STRING_TYPES = ["NimStringDesc", "NimStringV2"]
 
 ################################################################################
 #####  Type pretty printers
@@ -23,23 +27,28 @@ def printErrorOnce(id, message):
 
 type_hash_regex = re.compile("^([A-Za-z0-9]*)_([A-Za-z0-9]*)_+([A-Za-z0-9]*)$")
 
+def getNimName(typ):
+  if m := type_hash_regex.match(typ):
+    return m.group(2)
+  return f"unknown <{typ}>"
+
 def getNimRti(type_name):
   """ Return a ``gdb.Value`` object for the Nim Runtime Information of ``type_name``. """
 
   # Get static const TNimType variable. This should be available for
   # every non trivial Nim type.
   m = type_hash_regex.match(type_name)
-  lookups = [
-    "NTI" + m.group(2).lower() + "__" + m.group(3) + "_",
-    "NTI" + "__" + m.group(3) + "_",
-    "NTI" + m.group(2).replace("colon", "58").lower() + "__" + m.group(3) + "_"
-    ]
   if m:
-      for l in lookups:
-        try:
-          return gdb.parse_and_eval(l)
-        except:
-          pass
+    lookups = [
+      "NTI" + m.group(2).lower() + "__" + m.group(3) + "_",
+      "NTI" + "__" + m.group(3) + "_",
+      "NTI" + m.group(2).replace("colon", "58").lower() + "__" + m.group(3) + "_"
+      ]
+    for l in lookups:
+      try:
+        return gdb.parse_and_eval(l)
+      except:
+        pass
   None
 
 def getNameFromNimRti(rti):
@@ -68,7 +77,7 @@ class NimTypeRecognizer:
     
     'NIM_BOOL': 'bool',
 
-    'NIM_CHAR': 'char', 'NCSTRING': 'cstring', 'NimStringDesc': 'string'
+    'NIM_CHAR': 'char', 'NCSTRING': 'cstring', 'NimStringDesc': 'string', 'NimStringV2': 'string'
   }
 
   # object_type_pattern = re.compile("^(\w*):ObjectType$")
@@ -103,6 +112,12 @@ class NimTypeRecognizer:
       result = self.type_map_static.get(tname, None)
       if result:
         return result
+      elif tname.startswith("tyEnum_"):
+        return getNimName(tname)
+      elif tname.startswith("tyTuple__"):
+        # We make the name be the field types (Just like in Nim)
+        fields = ", ".join([self.recognize(field.type) for field in type_obj.fields()])
+        return f"({fields})"
 
       rti = getNimRti(tname)
       if rti:
@@ -136,7 +151,7 @@ class DollarPrintFunction (gdb.Function):
   "Nim's equivalent of $ operator as a gdb function, available in expressions `print $dollar(myvalue)"
 
   dollar_functions = re.findall(
-    'NimStringDesc \*(dollar__[A-z0-9_]+?)\(([^,)]*)\);',
+    r'(?:NimStringDesc \*|NimStringV2)\s?(dollar__[A-z0-9_]+?)\(([^,)]*)\);',
     gdb.execute("info functions dollar__", True, True)
   )
 
@@ -145,25 +160,23 @@ class DollarPrintFunction (gdb.Function):
 
 
   @staticmethod
-  def invoke_static(arg):
-
-    if arg.type.code == gdb.TYPE_CODE_PTR and arg.type.target().name == "NimStringDesc":
+  def invoke_static(arg, ignore_errors = False):
+    if arg.type.code == gdb.TYPE_CODE_PTR and arg.type.target().name in NIM_STRING_TYPES:
       return arg
-
     argTypeName = str(arg.type)
-
     for func, arg_typ in DollarPrintFunction.dollar_functions:
       # this way of overload resolution cannot deal with type aliases,
       # therefore it won't find all overloads.
       if arg_typ == argTypeName:
-        func_value = gdb.lookup_global_symbol(func, gdb.SYMBOL_FUNCTIONS_DOMAIN).value()
+        func_value = gdb.lookup_global_symbol(func, gdb.SYMBOL_FUNCTION_DOMAIN).value()
         return func_value(arg)
 
       elif arg_typ == argTypeName + " *":
-        func_value = gdb.lookup_global_symbol(func, gdb.SYMBOL_FUNCTIONS_DOMAIN).value()
+        func_value = gdb.lookup_global_symbol(func, gdb.SYMBOL_FUNCTION_DOMAIN).value()
         return func_value(arg.address)
 
-    printErrorOnce(argTypeName, "No suitable Nim $ operator found for type: " + argTypeName + "\n")
+    if not ignore_errors:
+      debugPrint(f"No suitable Nim $ operator found for type: {getNimName(argTypeName)}\n")
     return None
 
   def invoke(self, arg):
@@ -184,11 +197,11 @@ class NimStringEqFunction (gdb.Function):
 
   @staticmethod
   def invoke_static(arg1,arg2):
-    if arg1.type.code == gdb.TYPE_CODE_PTR and arg1.type.target().name == "NimStringDesc":
+    if arg1.type.code == gdb.TYPE_CODE_PTR and arg1.type.target().name in NIM_STRING_TYPES:
       str1 = NimStringPrinter(arg1).to_string()
     else:
       str1 = arg1.string()
-    if arg2.type.code == gdb.TYPE_CODE_PTR and arg2.type.target().name == "NimStringDesc":
+    if arg2.type.code == gdb.TYPE_CODE_PTR and arg2.type.target().name in NIM_STRING_TYPES:
       str2 = NimStringPrinter(arg1).to_string()
     else:
       str2 = arg2.string()
@@ -216,7 +229,7 @@ class DollarPrintCmd (gdb.Command):
     strValue = DollarPrintFunction.invoke_static(param)
     if strValue:
       gdb.write(
-        NimStringPrinter(strValue).to_string() + "\n",
+        str(NimStringPrinter(strValue)) + "\n",
         gdb.STDOUT
       )
 
@@ -254,7 +267,6 @@ class KochCmd (gdb.Command):
       os.path.dirname(os.path.dirname(__file__)), "koch")
 
   def invoke(self, argument, from_tty):
-    import os
     subprocess.run([self.binary] + gdb.string_to_argv(argument))
 
 KochCmd()
@@ -308,8 +320,14 @@ class NimBoolPrinter:
 
 ################################################################################
 
+def strFromLazy(strVal):
+  if isinstance(strVal, str):
+    return strVal
+  else:
+    return strVal.value().string("utf-8")
+
 class NimStringPrinter:
-  pattern = re.compile(r'^NimStringDesc \*$')
+  pattern = re.compile(r'^(NimStringDesc \*|NimStringV2)$')
 
   def __init__(self, val):
     self.val = val
@@ -319,11 +337,19 @@ class NimStringPrinter:
 
   def to_string(self):
     if self.val:
-      l = int(self.val['Sup']['len'])
-      return self.val['data'].lazy_string(encoding="utf-8", length=l)
+      if self.val.type.name == "NimStringV2":
+        l = int(self.val["len"])
+        data = self.val["p"]["data"]
+      else:
+        l = int(self.val['Sup']['len'])
+        data = self.val["data"]
+      return data.lazy_string(encoding="utf-8", length=l)
     else:
       return ""
 
+  def __str__(self):
+    return strFromLazy(self.to_string())
+
 class NimRopePrinter:
   pattern = re.compile(r'^tyObject_RopeObj__([A-Za-z0-9]*) \*$')
 
@@ -345,39 +371,11 @@ class NimRopePrinter:
 
 ################################################################################
 
-# proc reprEnum(e: int, typ: PNimType): string {.compilerRtl.} =
-#   ## Return string representation for enumeration values
-#   var n = typ.node
-#   if ntfEnumHole notin typ.flags:
-#     let o = e - n.sons[0].offset
-#     if o >= 0 and o <% typ.node.len:
-#       return $n.sons[o].name
-#   else:
-#     # ugh we need a slow linear search:
-#     var s = n.sons
-#     for i in 0 .. n.len-1:
-#       if s[i].offset == e:
-#         return $s[i].name
-#   result = $e & " (invalid data!)"
-
 def reprEnum(e, typ):
-  """ this is a port of the nim runtime function `reprEnum` to python """
+  # Casts the value to the enum type and then calls the enum printer
   e = int(e)
-  n = typ["node"]
-  flags = int(typ["flags"])
-  # 1 << 6 is {ntfEnumHole}
-  if ((1 << 6) & flags) == 0:
-    o = e - int(n["sons"][0]["offset"])
-    if o >= 0 and 0 < int(n["len"]):
-      return n["sons"][o]["name"].string("utf-8", "ignore")
-  else:
-    # ugh we need a slow linear search:
-    s = n["sons"]
-    for i in range(0, int(n["len"])):
-      if int(s[i]["offset"]) == e:
-        return s[i]["name"].string("utf-8", "ignore")
-
-  return str(e) + " (invalid data!)"
+  val = gdb.Value(e).cast(typ)
+  return strFromLazy(NimEnumPrinter(val).to_string())
 
 def enumNti(typeNimName, idString):
   typeInfoName = "NTI" + typeNimName.lower() + "__" + idString + "_"
@@ -389,6 +387,7 @@ def enumNti(typeNimName, idString):
 
 class NimEnumPrinter:
   pattern = re.compile(r'^tyEnum_([A-Za-z0-9]+)__([A-Za-z0-9]*)$')
+  enumReprProc = gdb.lookup_global_symbol("reprEnum", gdb.SYMBOL_FUNCTION_DOMAIN)
 
   def __init__(self, val):
     self.val = val
@@ -397,14 +396,18 @@ class NimEnumPrinter:
     self.typeNimName  = match.group(1)
     typeInfoName, self.nti = enumNti(self.typeNimName, match.group(2))
 
-    if self.nti is None:
-      printErrorOnce(typeInfoName, f"NimEnumPrinter: lookup global symbol: '{typeInfoName}' failed for {typeName}.\n")
-
   def to_string(self):
-    if self.nti:
-      arg0     = self.val
-      arg1     = self.nti.value(gdb.newest_frame())
-      return reprEnum(arg0, arg1)
+    if NimEnumPrinter.enumReprProc and self.nti:
+      # Use the old runtime's reprEnum function.
+      # We call the Nim proc itself so that the formatting is always correct
+      f = gdb.newest_frame()
+      # We need to strip the quotes so it looks like an enum instead of a string
+      reprProc = NimEnumPrinter.enumReprProc.value()
+      return str(reprProc(self.val, self.nti.value(f).address)).strip('"')
+    elif dollarResult := DollarPrintFunction.invoke_static(self.val):
+      # The new runtime doesn't use reprEnum, so we try to call the
+      # dollar ($) function for it instead
+      return str(NimStringPrinter(dollarResult))
     else:
       return self.typeNimName + "(" + str(int(self.val)) + ")"
 
@@ -421,26 +424,20 @@ class NimSetPrinter:
     typeName = self.val.type.name
     match = self.pattern.match(typeName)
     self.typeNimName = match.group(1)
-    typeInfoName, self.nti = enumNti(self.typeNimName, match.group(2))
-
-    if self.nti is None:
-      printErrorOnce(typeInfoName, f"NimSetPrinter: lookup global symbol: '{typeInfoName}' failed for {typeName}.\n")
 
   def to_string(self):
-    if self.nti:
-      nti = self.nti.value(gdb.newest_frame())
-      enumStrings = []
-      val = int(self.val)
-      i   = 0
-      while val > 0:
-        if (val & 1) == 1:
-          enumStrings.append(reprEnum(i, nti))
-        val = val >> 1
-        i += 1
-
-      return '{' + ', '.join(enumStrings) + '}'
-    else:
-      return str(int(self.val))
+    # Remove the tySet from the type name
+    typ = gdb.lookup_type(self.val.type.name[6:])
+    enumStrings = []
+    val = int(self.val)
+    i   = 0
+    while val > 0:
+      if (val & 1) == 1:
+        enumStrings.append(reprEnum(i, typ))
+      val = val >> 1
+      i += 1
+
+    return '{' + ', '.join(enumStrings) + '}'
 
 ################################################################################
 
@@ -472,41 +469,81 @@ class NimHashSetPrinter:
 
 ################################################################################
 
-class NimSeqPrinter:
-  # the pointer is explicity part of the type. So it is part of
-  # ``pattern``.
-  pattern = re.compile(r'^tySequence_\w* \*$')
+class NimSeq:
+  # Wrapper around sequences.
+  # This handles the differences between the old and new runtimes
 
   def __init__(self, val):
     self.val = val
+    # new runtime has sequences on stack, old has them on heap
+    self.new = val.type.code != gdb.TYPE_CODE_PTR
+    if self.new:
+      # Some seqs are just the content; to avoid repeating ourselves we also
+      # handle them here. Only the len/data getters need to check this flag
+      self.isContent = val.type.name.endswith("Content")
+
+  def __bool__(self):
+    if self.new:
+      return self.val is not None
+    else:
+      return bool(self.val)
+
+  def __len__(self):
+    if not self:
+      return 0
+    if self.new:
+      if self.isContent:
+        return int(self.val["cap"])
+      else:
+        return int(self.val["len"])
+    else:
+      return int(self.val["Sup"]["len"])
+
+  @property
+  def data(self):
+    if self.new:
+      if self.isContent:
+        return self.val["data"]
+      elif self.val["p"]:
+        return self.val["p"]["data"]
+    else:
+      return self.val["data"]
+
+  @property
+  def cap(self):
+    if not self:
+      return 0
+    if self.new:
+      if self.isContent:
+        return int(self.val["cap"])
+      elif self.val["p"]:
+        return int(self.val["p"]["cap"])
+      else:
+        return 0
+    return int(self.val['Sup']['reserved'])
+
+class NimSeqPrinter:
+  pattern = re.compile(r'^tySequence_\w*\s?\*?$')
+
+  def __init__(self, val):
+    self.val = NimSeq(val)
+
 
   def display_hint(self):
     return 'array'
 
   def to_string(self):
-    len = 0
-    cap = 0
-    if self.val:
-      len = int(self.val['Sup']['len'])
-      cap = int(self.val['Sup']['reserved'])
-
-    return 'seq({0}, {1})'.format(len, cap)
+    return f'seq({len(self.val)}, {self.val.cap})'
 
   def children(self):
     if self.val:
       val = self.val
-      valType = val.type
-      length = int(val['Sup']['len'])
+      length = len(val)
 
       if length <= 0:
         return
 
-      dataType = valType['data'].type
-      data = val['data']
-
-      if self.val.type.name is None:
-        dataType = valType['data'].type.target().pointer()
-        data = val['data'].cast(dataType)
+      data = val.data
 
       inaccessible = False
       for i in range(length):
@@ -585,7 +622,7 @@ class NimTablePrinter:
     if self.val:
       counter  = int(self.val['counter'])
       if self.val['data']:
-        capacity = int(self.val['data']['Sup']['len'])
+        capacity = NimSeq(self.val["data"]).cap
 
     return 'Table({0}, {1})'.format(counter, capacity)
 
@@ -597,162 +634,18 @@ class NimTablePrinter:
           yield (idxStr + '.Field1', entry['Field1'])
           yield (idxStr + '.Field2', entry['Field2'])
 
-################################################################
+################################################################################
 
-# this is untested, therefore disabled
-
-# class NimObjectPrinter:
-#   pattern = re.compile(r'^tyObject_([A-Za-z0-9]+)__(_?[A-Za-z0-9]*)(:? \*)?$')
-
-#   def __init__(self, val):
-#     self.val = val
-#     self.valType = None
-#     self.valTypeNimName = None
-
-#   def display_hint(self):
-#     return 'object'
-
-#   def _determineValType(self):
-#     if self.valType is None:
-#       vt = self.val.type
-#       if vt.name is None:
-#         target = vt.target()
-#         self.valType = target.pointer()
-#         self.fields = target.fields()
-#         self.valTypeName = target.name
-#         self.isPointer = True
-#       else:
-#         self.valType = vt
-#         self.fields = vt.fields()
-#         self.valTypeName = vt.name
-#         self.isPointer = False
-
-#   def to_string(self):
-#     if self.valTypeNimName is None:
-#       self._determineValType()
-#       match = self.pattern.match(self.valTypeName)
-#       self.valTypeNimName = match.group(1)
-
-#     return self.valTypeNimName
-
-#   def children(self):
-#     self._determineValType()
-#     if self.isPointer and int(self.val) == 0:
-#       return
-#     self.baseVal = self.val.referenced_value() if self.isPointer else self.val
-
-#     for c in self.handleFields(self.baseVal, getNimRti(self.valTypeName)):
-#       yield c
-  
-#   def handleFields(self, currVal, rti, fields = None):
-#     rtiSons = None
-#     discField = (0, None)
-#     seenSup = False
-#     if fields is None:
-#       fields = self.fields
-#     try: # XXX: remove try after finished debugging this method
-#       for (i, field) in enumerate(fields):
-#         if field.name == "Sup": # inherited data
-#           seenSup = True
-#           baseRef = rti['base']
-#           if baseRef:
-#             baseRti = baseRef.referenced_value()
-#             baseVal = currVal['Sup']
-#             baseValType = baseVal.type
-#             if baseValType.name is None:
-#               baseValType = baseValType.target().pointer()
-#               baseValFields = baseValType.target().fields()
-#             else:
-#               baseValFields = baseValType.fields()
-            
-#             for c in self.handleFields(baseVal, baseRti, baseValFields):
-#               yield c
-#         else:
-#           if field.type.code == gdb.TYPE_CODE_UNION:
-#             # if not rtiSons:
-#             rtiNode = rti['node'].referenced_value()
-#             rtiSons = rtiNode['sons']
-
-#             if not rtiSons and int(rtiNode['len']) == 0 and str(rtiNode['name']) != "0x0":
-#               rtiSons = [rti['node']] # sons are dereferenced by the consumer
-            
-#             if not rtiSons:
-#               printErrorOnce(self.valTypeName, f"NimObjectPrinter: UNION field can't be displayed without RTI {self.valTypeName}, using fallback.\n")
-#               # yield (field.name, self.baseVal[field]) # XXX: this fallback seems wrong
-#               return # XXX: this should probably continue instead?
-
-#             if int(rtiNode['len']) != 0 and str(rtiNode['name']) != "0x0":
-#               gdb.write(f"wtf IT HAPPENED {self.valTypeName}\n", gdb.STDERR)
-
-#             discNode = rtiSons[discField[0]].referenced_value()
-#             if not discNode:
-#               raise ValueError("Can't find union discriminant field in object RTI")
-            
-#             discNodeLen = int(discNode['len'])
-#             discFieldVal = int(currVal[discField[1].name])
-
-#             unionNodeRef = None
-#             if discFieldVal < discNodeLen:
-#               unionNodeRef = discNode['sons'][discFieldVal]
-#             if not unionNodeRef:
-#               unionNodeRef = discNode['sons'][discNodeLen]
-
-#             if not unionNodeRef:
-#               printErrorOnce(self.valTypeName + "no union node", f"wtf is up with sons {self.valTypeName} {unionNodeRef} {rtiNode['offset']} {discNode} {discFieldVal} {discNodeLen} {discField[1].name} {field.name} {field.type}\n")
-#               continue
-
-#             unionNode = unionNodeRef.referenced_value()
-            
-#             fieldName = "" if field.name == None else field.name.lower()
-#             unionNodeName = "" if not unionNode['name'] else unionNode['name'].string("utf-8", "ignore")
-#             if not unionNodeName or unionNodeName.lower() != fieldName:
-#               unionFieldName = f"_{discField[1].name.lower()}_{int(rti['node'].referenced_value()['len'])}"
-#               gdb.write(f"wtf i: {i} union: {unionFieldName} field: {fieldName} type: {field.type.name} tag: {field.type.tag}\n", gdb.STDERR)
-#             else:
-#               unionFieldName = unionNodeName
-
-#             if discNodeLen == 0:
-#               yield (unionFieldName, currVal[unionFieldName])
-#             else:
-#               unionNodeLen = int(unionNode['len'])
-#               if unionNodeLen > 0:
-#                 for u in range(unionNodeLen):
-#                   un = unionNode['sons'][u].referenced_value()['name'].string("utf-8", "ignore")
-#                   yield (un, currVal[unionFieldName][un])
-#               else:
-#                 yield(unionNodeName, currVal[unionFieldName])
-#           else:
-#             discIndex = i - 1 if seenSup else i
-#             discField = (discIndex, field) # discriminant field is the last normal field
-#             yield (field.name, currVal[field.name])
-#     except GeneratorExit:
-#       raise
-#     except:
-#       gdb.write(f"wtf {self.valTypeName} {i} fn: {field.name} df: {discField} rti: {rti} rtiNode: {rti['node'].referenced_value()} rtiSons: {rtiSons} {sys.exc_info()} {traceback.format_tb(sys.exc_info()[2], limit = 10)}\n", gdb.STDERR)
-#       gdb.write(f"wtf {self.valTypeName} {i} {field.name}\n", gdb.STDERR)
-      
-#       # seenSup = False
-#       # for (i, field) in enumerate(fields):
-#       #   # if field.name:
-#       #   #   val = currVal[field.name]
-#       #   # else:
-#       #   #   val = None
-#       #   rtiNode = rti['node'].referenced_value()
-#       #   rtiLen = int(rtiNode['len'])
-#       #   if int(rtiNode['len']) > 0:
-#       #     sons = rtiNode['sons']
-#       #   elif int(rti['len']) == 0 and str(rti['name']) != "0x0":
-#       #     sons = [rti['node']] # sons are dereferenced by the consumer
-#       #   sonsIdx = i - 1 if seenSup else i
-#       #   s = sons[sonsIdx].referenced_value()
-#       #   addr = int(currVal.address)
-#       #   off = addr + int(rtiNode['offset'])
-#       #   seenSup = seenSup or field.name == "Sup"
-
-#       #   gdb.write(f"wtf: i: {i} sonsIdx: {sonsIdx} field: {field.name} rtiLen: {rtiLen} rti: {rti} rtiNode: {rtiNode} isUnion: {field.type.code == gdb.TYPE_CODE_UNION} s: {s}\n", gdb.STDERR)
-
-#       raise
+class NimTuplePrinter:
+  pattern = re.compile(r"^tyTuple__([A-Za-z0-9]*)")
 
+  def __init__(self, val):
+    self.val = val
+
+  def to_string(self):
+    # We don't have the original field names, so just print the tuple as if it were anonymous
+    tupleValues = [str(self.val[field.name]) for field in self.val.type.fields()]
+    return f"({', '.join(tupleValues)})"
 
 ################################################################################
 
@@ -784,7 +677,7 @@ def makematcher(klass):
   return matcher
 
 def register_nim_pretty_printers_for_object(objfile):
-  nimMainSym = gdb.lookup_global_symbol("NimMain", gdb.SYMBOL_FUNCTIONS_DOMAIN)
+  nimMainSym = gdb.lookup_global_symbol("NimMain", gdb.SYMBOL_FUNCTION_DOMAIN)
   if nimMainSym and nimMainSym.symtab.objfile == objfile:
     print("set Nim pretty printers for ", objfile.filename)
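
A minimal usage sketch (not part of the patch): once the script above is loaded into a gdb session on a Nim binary, the registered pretty printers and the `$dollar` convenience function from `DollarPrintFunction` become available. The binary name, source file and breakpoint location below are assumptions for illustration.

    # Hypothetical driver, e.g. run via `gdb -q -x load_nim.py ./myNimProgram`
    # (assumes myNimProgram was built with `nim c --debugger:native`).
    import gdb

    gdb.execute("source tools/debug/nim-gdb.py")   # registers the pretty printers above
    gdb.execute("break myModule.nim:12")           # hypothetical breakpoint location
    gdb.execute("run")
    gdb.execute("print myString")                  # rendered by NimStringPrinter
    gdb.execute("print $dollar(myValue)")          # Nim's `$` via DollarPrintFunction
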
 
diff --git a/tools/debug/nimlldb.py b/tools/debug/nimlldb.py
new file mode 100644
index 000000000..4bc4e771f
--- /dev/null
+++ b/tools/debug/nimlldb.py
@@ -0,0 +1,1380 @@
+import lldb
+from collections import OrderedDict
+from typing import Union
+
+
+def sbvaluegetitem(self: lldb.SBValue, name: Union[int, str]) -> lldb.SBValue:
+    if isinstance(name, str):
+        return self.GetChildMemberWithName(name)
+    else:
+        return self.GetChildAtIndex(name)
+
+
+# Make this easier to work with
+lldb.SBValue.__getitem__ = sbvaluegetitem
+
+NIM_IS_V2 = True
+
+
+def get_nti(value: lldb.SBValue, nim_name=None):
+    name_split = value.type.name.split("_")
+    type_nim_name = nim_name or name_split[1]
+    id_string = name_split[-1].split(" ")[0]
+
+    type_info_name = "NTI" + type_nim_name.lower() + "__" + id_string + "_"
+    nti = value.target.FindFirstGlobalVariable(type_info_name)
+    if not nti.IsValid():
+        type_info_name = "NTI" + "__" + id_string + "_"
+        nti = value.target.FindFirstGlobalVariable(type_info_name)
+    if not nti.IsValid():
+        print(f"NimEnumPrinter: lookup global symbol: '{type_info_name}' failed for {value.type.name}.\n")
+    return type_nim_name, nti
+
+
+def enum_to_string(value: lldb.SBValue, int_val=None, nim_name=None):
+    tname = nim_name or value.type.name.split("_")[1]
+
+    enum_val = value.signed
+    if int_val is not None:
+        enum_val = int_val
+
+    default_val = f"{tname}.{str(enum_val)}"
+
+    fn_syms = value.target.FindFunctions("reprEnum")
+    if not fn_syms.GetSize() > 0:
+        return default_val
+
+    fn_sym: lldb.SBSymbolContext = fn_syms.GetContextAtIndex(0)
+
+    fn: lldb.SBFunction = fn_sym.function
+
+    fn_type: lldb.SBType = fn.type
+    arg_types: lldb.SBTypeList = fn_type.GetFunctionArgumentTypes()
+    if arg_types.GetSize() < 2:
+        return default_val
+
+    arg1_type: lldb.SBType = arg_types.GetTypeAtIndex(0)
+    arg2_type: lldb.SBType = arg_types.GetTypeAtIndex(1)
+
+    ty_info_name, nti = get_nti(value, nim_name=tname)
+
+    if not nti.IsValid():
+        return default_val
+
+    call = f"{fn.name}(({arg1_type.name}){enum_val}, ({arg2_type.name})" + str(nti.GetLoadAddress()) + ");"
+
+    res = executeCommand(call)
+
+    if res.error.fail:
+        return default_val
+
+    return f"{tname}.{res.summary[1:-1]}"
+
+
+def to_string(value: lldb.SBValue):
+    # For getting NimStringDesc * value
+    value = value.GetNonSyntheticValue()
+
+    # Check if data pointer is Null
+    if value.type.is_pointer and value.unsigned == 0:
+        return None
+
+    size = int(value["Sup"]["len"].unsigned)
+
+    if size == 0:
+        return ""
+
+    if size > 2**14:
+        return "... (too long) ..."
+
+    data = value["data"]
+
+    # Check if first element is NULL
+    base_data_type = value.target.FindFirstType("char")
+    cast = data.Cast(base_data_type)
+
+    if cast.unsigned == 0:
+        return None
+
+    cast = data.Cast(value.target.FindFirstType("char").GetArrayType(size))
+    return bytearray(cast.data.uint8s).decode("utf-8")
+
+
+def to_stringV2(value: lldb.SBValue):
+    # For getting NimStringV2 value
+    value = value.GetNonSyntheticValue()
+
+    data = value["p"]["data"]
+
+    # Check if data pointer is Null
+    if value["p"].unsigned == 0:
+        return None
+
+    size = int(value["len"].signed)
+
+    if size == 0:
+        return ""
+
+    if size > 2**14:
+        return "... (too long) ..."
+
+    # Check if first element is NULL
+    base_data_type = data.type.GetArrayElementType().GetTypedefedType()
+    cast = data.Cast(base_data_type)
+
+    if cast.unsigned == 0:
+        return None
+
+    cast = data.Cast(base_data_type.GetArrayType(size))
+    return bytearray(cast.data.uint8s).decode("utf-8")
+
+
+def NimString(value: lldb.SBValue, internal_dict):
+    if is_local(value):
+        if not is_in_scope(value):
+            return "undefined"
+
+    custom_summary = get_custom_summary(value)
+    if not custom_summary is None:
+        return custom_summary
+
+    if NIM_IS_V2:
+        res = to_stringV2(value)
+    else:
+        res = to_string(value)
+
+    if res is not None:
+        return f'"{res}"'
+    else:
+        return "nil"
+
+
+def rope_helper(value: lldb.SBValue) -> str:
+    value = value.GetNonSyntheticValue()
+    if value.type.is_pointer and value.unsigned == 0:
+        return ""
+
+    if value["length"].unsigned == 0:
+        return ""
+
+    if NIM_IS_V2:
+        str_val = to_stringV2(value["data"])
+    else:
+        str_val = to_string(value["data"])
+
+    if str_val is None:
+        str_val = ""
+
+    return rope_helper(value["left"]) + str_val + rope_helper(value["right"])
+
+
+def Rope(value: lldb.SBValue, internal_dict):
+    if is_local(value):
+        if not is_in_scope(value):
+            return "undefined"
+
+    custom_summary = get_custom_summary(value)
+    if not custom_summary is None:
+        return custom_summary
+
+    rope_str = rope_helper(value)
+
+    if len(rope_str) == 0:
+        rope_str = "nil"
+    else:
+        rope_str = f'"{rope_str}"'
+
+    return f"Rope({rope_str})"
+
+
+def NCSTRING(value: lldb.SBValue, internal_dict=None):
+    if is_local(value):
+        if not is_in_scope(value):
+            return "undefined"
+
+    ty = value.Dereference().type
+    val = value.target.CreateValueFromAddress(
+        value.name or "temp", lldb.SBAddress(value.unsigned, value.target), ty
+    ).AddressOf()
+    return val.summary
+
+
+def ObjectV2(value: lldb.SBValue, internal_dict):
+    if is_local(value):
+        if not is_in_scope(value):
+            return "undefined"
+
+    orig_value = value.GetNonSyntheticValue()
+    if orig_value.type.is_pointer and orig_value.unsigned == 0:
+        return "nil"
+
+    custom_summary = get_custom_summary(value)
+    if custom_summary is not None:
+        return custom_summary
+
+    while orig_value.type.is_pointer:
+        orig_value = orig_value.Dereference()
+
+    if "_" in orig_value.type.name:
+        obj_name = orig_value.type.name.split("_")[1].replace("colonObjectType", "")
+    else:
+        obj_name = orig_value.type.name
+
+    num_children = value.num_children
+    fields = []
+
+    for i in range(num_children):
+        fields.append(f"{value[i].name}: {value[i].summary}")
+
+    res = f"{obj_name}(" + ", ".join(fields) + ")"
+    return res
+
+
+def Number(value: lldb.SBValue, internal_dict):
+    if is_local(value):
+        if not is_in_scope(value):
+            return "undefined"
+
+    if value.type.is_pointer and value.signed == 0:
+        return "nil"
+
+    custom_summary = get_custom_summary(value)
+    if not custom_summary is None:
+        return custom_summary
+
+    return str(value.signed)
+
+
+def Float(value: lldb.SBValue, internal_dict):
+    if is_local(value):
+        if not is_in_scope(value):
+            return "undefined"
+
+    custom_summary = get_custom_summary(value)
+    if not custom_summary is None:
+        return custom_summary
+
+    return str(value.value)
+
+
+def UnsignedNumber(value: lldb.SBValue, internal_dict):
+    if is_local(value):
+        if not is_in_scope(value):
+            return "undefined"
+
+    custom_summary = get_custom_summary(value)
+    if not custom_summary is None:
+        return custom_summary
+
+    return str(value.unsigned)
+
+
+def Bool(value: lldb.SBValue, internal_dict):
+    if is_local(value):
+        if not is_in_scope(value):
+            return "undefined"
+
+    custom_summary = get_custom_summary(value)
+    if not custom_summary is None:
+        return custom_summary
+
+    return str(value.value)
+
+
+def CharArray(value: lldb.SBValue, internal_dict):
+    if is_local(value):
+        if not is_in_scope(value):
+            return "undefined"
+
+    custom_summary = get_custom_summary(value)
+    if not custom_summary is None:
+        return custom_summary
+
+    return str([f"'{chr(char)}'" for char in value.uint8s])
+
+
+def Array(value: lldb.SBValue, internal_dict):
+    if is_local(value):
+        if not is_in_scope(value):
+            return "undefined"
+
+    value = value.GetNonSyntheticValue()
+    custom_summary = get_custom_summary(value)
+    if not custom_summary is None:
+        return custom_summary
+
+    value = value.GetNonSyntheticValue()
+    return "[" + ", ".join([value[i].summary for i in range(value.num_children)]) + "]"
+
+
+def Tuple(value: lldb.SBValue, internal_dict):
+    if is_local(value):
+        if not is_in_scope(value):
+            return "undefined"
+
+    custom_summary = get_custom_summary(value)
+    if not custom_summary is None:
+        return custom_summary
+
+    while value.type.is_pointer:
+        value = value.Dereference()
+
+    num_children = value.num_children
+
+    fields = []
+
+    for i in range(num_children):
+        key = value[i].name
+        val = value[i].summary
+        if key.startswith("Field"):
+            fields.append(f"{val}")
+        else:
+            fields.append(f"{key}: {val}")
+
+    return "(" + ", ".join(fields) + ")"
+
+
+def is_local(value: lldb.SBValue) -> bool:
+    line: lldb.SBLineEntry = value.frame.GetLineEntry()
+    decl: lldb.SBDeclaration = value.GetDeclaration()
+
+    if line.file == decl.file and decl.line != 0:
+        return True
+
+    return False
+
+
+def is_in_scope(value: lldb.SBValue) -> bool:
+    line: lldb.SBLineEntry = value.frame.GetLineEntry()
+    decl: lldb.SBDeclaration = value.GetDeclaration()
+
+    if is_local(value) and decl.line < line.line:
+        return True
+
+    return False
+
+
+def Enum(value: lldb.SBValue, internal_dict):
+    if is_local(value):
+        if not is_in_scope(value):
+            return "undefined"
+
+    custom_summary = get_custom_value_summary(value)
+    if custom_summary is not None:
+        return custom_summary
+
+    return enum_to_string(value)
+
+
+def EnumSet(value: lldb.SBValue, internal_dict):
+    if is_local(value):
+        if not is_in_scope(value):
+            return "undefined"
+
+    custom_summary = get_custom_summary(value)
+    if not custom_summary is None:
+        return custom_summary
+
+    vals = []
+    max_vals = 7
+    for child in value.children:
+        vals.append(child.summary)
+        if len(vals) > max_vals:
+            vals.append("...")
+            break
+
+    return "{" + ", ".join(vals) + "}"
+
+
+def Set(value: lldb.SBValue, internal_dict):
+    if is_local(value):
+        if not is_in_scope(value):
+            return "undefined"
+
+    custom_summary = get_custom_summary(value)
+    if custom_summary is not None:
+        return custom_summary
+
+    vals = []
+    max_vals = 7
+    for child in value.children:
+        vals.append(child.value)
+        if len(vals) > max_vals:
+            vals.append("...")
+            break
+
+    return "{" + ", ".join(vals) + "}"
+
+
+def Table(value: lldb.SBValue, internal_dict):
+    if is_local(value):
+        if not is_in_scope(value):
+            return "undefined"
+
+    custom_summary = get_custom_summary(value)
+    if custom_summary is not None:
+        return custom_summary
+
+    fields = []
+
+    for i in range(value.num_children):
+        key = value[i].name
+        val = value[i].summary
+        fields.append(f"{key}: {val}")
+
+    return "Table({" + ", ".join(fields) + "})"
+
+
+def HashSet(value: lldb.SBValue, internal_dict):
+    if is_local(value):
+        if not is_in_scope(value):
+            return "undefined"
+
+    custom_summary = get_custom_summary(value)
+    if custom_summary is not None:
+        return custom_summary
+
+    fields = []
+
+    for i in range(value.num_children):
+        fields.append(f"{value[i].summary}")
+
+    return "HashSet({" + ", ".join(fields) + "})"
+
+
+def StringTable(value: lldb.SBValue, internal_dict):
+    if is_local(value):
+        if not is_in_scope(value):
+            return "undefined"
+
+    custom_summary = get_custom_summary(value)
+    if not custom_summary is None:
+        return custom_summary
+
+    fields = []
+
+    for i in range(value.num_children - 1):
+        key = value[i].name
+        val = value[i].summary
+        fields.append(f"{key}: {val}")
+
+    mode = value[value.num_children - 1].summary
+
+    return "StringTable({" + ", ".join(fields) + f"}}, mode={mode})"
+
+
+def Sequence(value: lldb.SBValue, internal_dict):
+    if is_local(value):
+        if not is_in_scope(value):
+            return "undefined"
+
+    custom_summary = get_custom_summary(value)
+    if not custom_summary is None:
+        return custom_summary
+
+    return "@[" + ", ".join([value[i].summary for i in range(value.num_children)]) + "]"
+
+
+class StringChildrenProvider:
+    def __init__(self, value: lldb.SBValue, internalDict):
+        self.value = value
+        self.data_type: lldb.SBType
+        if not NIM_IS_V2:
+            self.data_type = self.value.target.FindFirstType("char")
+
+        self.first_element: lldb.SBValue
+        self.update()
+        self.count = 0
+
+    def num_children(self):
+        return self.count
+
+    def get_child_index(self, name):
+        return int(name.lstrip("[").rstrip("]"))
+
+    def get_child_at_index(self, index):
+        offset = index * self.data_size
+        return self.first_element.CreateChildAtOffset("[" + str(index) + "]", offset, self.data_type)
+
+    def get_data(self) -> lldb.SBValue:
+        return self.value["p"]["data"] if NIM_IS_V2 else self.value["data"]
+
+    def get_len(self) -> int:
+        if NIM_IS_V2:
+            if self.value["p"].unsigned == 0:
+                return 0
+
+            size = int(self.value["len"].signed)
+
+            if size == 0:
+                return 0
+
+            data = self.value["p"]["data"]
+
+            # Check if first element is NULL
+            base_data_type = data.type.GetArrayElementType().GetTypedefedType()
+            cast = data.Cast(base_data_type)
+
+            if cast.unsigned == 0:
+                return 0
+        else:
+            if self.value.type.is_pointer and self.value.unsigned == 0:
+                return 0
+
+            size = int(self.value["Sup"]["len"].unsigned)
+
+            if size == 0:
+                return 0
+
+            data = self.value["data"]
+
+            # Check if first element is NULL
+            base_data_type = self.value.target.FindFirstType("char")
+            cast = data.Cast(base_data_type)
+
+            if cast.unsigned == 0:
+                return 0
+
+        return size
+
+    def update(self):
+        if is_local(self.value):
+            if not is_in_scope(self.value):
+                return
+
+        data = self.get_data()
+        size = self.get_len()
+
+        self.count = size
+        self.first_element = data
+
+        if NIM_IS_V2:
+            self.data_type = data.type.GetArrayElementType().GetTypedefedType()
+
+        self.data_size = self.data_type.GetByteSize()
+
+    def has_children(self):
+        return bool(self.num_children())
+
+
+class ArrayChildrenProvider:
+    def __init__(self, value: lldb.SBValue, internalDict):
+        self.value = value
+        self.data_type: lldb.SBType
+        self.first_element: lldb.SBValue
+        self.update()
+
+    def num_children(self):
+        return self.has_children() and self.value.num_children
+
+    def get_child_index(self, name: str):
+        return int(name.lstrip("[").rstrip("]"))
+
+    def get_child_at_index(self, index):
+        offset = index * self.value[index].GetByteSize()
+        return self.first_element.CreateChildAtOffset("[" + str(index) + "]", offset, self.data_type)
+
+    def update(self):
+        if not self.has_children():
+            return
+
+        self.first_element = self.value[0]
+        self.data_type = self.value.type.GetArrayElementType()
+
+    def has_children(self):
+        if is_local(self.value):
+            if not is_in_scope(self.value):
+                return False
+        return bool(self.value.num_children)
+
+
+class SeqChildrenProvider:
+    def __init__(self, value: lldb.SBValue, internalDict):
+        self.value = value
+        self.data_type: lldb.SBType
+        self.first_element: lldb.SBValue
+        self.data: lldb.SBValue
+        self.count = 0
+        self.update()
+
+    def num_children(self):
+        return self.count
+
+    def get_child_index(self, name: str):
+        return int(name.lstrip("[").rstrip("]"))
+
+    def get_child_at_index(self, index):
+        offset = index * self.data[index].GetByteSize()
+        return self.first_element.CreateChildAtOffset("[" + str(index) + "]", offset, self.data_type)
+
+    def get_data(self) -> lldb.SBValue:
+        return self.value["p"]["data"] if NIM_IS_V2 else self.value["data"]
+
+    def get_len(self) -> lldb.SBValue:
+        return self.value["len"] if NIM_IS_V2 else self.value["Sup"]["len"]
+
+    def update(self):
+        self.count = 0
+
+        if is_local(self.value):
+            if not is_in_scope(self.value):
+                return
+
+        self.count = self.get_len().unsigned
+
+        if not self.has_children():
+            return
+
+        data = self.get_data()
+        self.data_type = data.type.GetArrayElementType()
+
+        self.data = data.Cast(self.data_type.GetArrayType(self.num_children()))
+        self.first_element = self.data
+
+    def has_children(self):
+        return bool(self.num_children())
+
+
+class ObjectChildrenProvider:
+    def __init__(self, value: lldb.SBValue, internalDict):
+        self.value = value
+        self.data_type: lldb.SBType
+        self.first_element: lldb.SBValue
+        self.data: lldb.SBValue
+        self.children: OrderedDict[str, int] = OrderedDict()
+        self.child_list: list[lldb.SBValue] = []
+        self.update()
+
+    def num_children(self):
+        return len(self.children)
+
+    def get_child_index(self, name: str):
+        return self.children[name]
+
+    def get_child_at_index(self, index):
+        return self.child_list[index]
+
+    def populate_children(self):
+        self.children.clear()
+        self.child_list = []
+
+        if is_local(self.value):
+            if not is_in_scope(self.value):
+                return
+
+        stack = [self.value.GetNonSyntheticValue()]
+
+        index = 0
+
+        while stack:
+            cur_val = stack.pop()
+            if cur_val.type.is_pointer and cur_val.unsigned == 0:
+                continue
+
+            while cur_val.type.is_pointer:
+                cur_val = cur_val.Dereference()
+
+            # Add super objects if they exist
+            if cur_val.num_children > 0 and cur_val[0].name == "Sup" and cur_val[0].type.name.startswith("tyObject"):
+                stack.append(cur_val[0])
+
+            for child in cur_val.children:
+                child = child.GetNonSyntheticValue()
+                if child.name == "Sup":
+                    continue
+                self.children[child.name] = index
+                self.child_list.append(child)
+                index += 1
+
+    def update(self):
+        self.populate_children()
+
+    def has_children(self):
+        return bool(self.num_children())
+
+
+class HashSetChildrenProvider:
+    def __init__(self, value: lldb.SBValue, internalDict):
+        self.value = value
+        self.child_list: list[lldb.SBValue] = []
+        self.update()
+
+    def num_children(self):
+        return len(self.child_list)
+
+    def get_child_index(self, name: str):
+        return int(name.lstrip("[").rstrip("]"))
+
+    def get_child_at_index(self, index):
+        return self.child_list[index]
+
+    def get_data(self) -> lldb.SBValue:
+        return self.value["data"]["p"]["data"] if NIM_IS_V2 else self.value["data"]["data"]
+
+    def get_len(self) -> lldb.SBValue:
+        return self.value["data"]["len"] if NIM_IS_V2 else self.value["data"]["Sup"]["len"]
+
+    def update(self):
+        self.child_list = []
+
+        if is_local(self.value):
+            if not is_in_scope(self.value):
+                return
+
+        tuple_len = int(self.get_len().unsigned)
+        tuple = self.get_data()
+
+        base_data_type = tuple.type.GetArrayElementType()
+
+        cast = tuple.Cast(base_data_type.GetArrayType(tuple_len))
+
+        index = 0
+        for i in range(tuple_len):
+            el = cast[i]
+            field0 = int(el[0].unsigned)
+            if field0 == 0:
+                continue
+            key = el[1]
+            child = key.CreateValueFromAddress(f"[{str(index)}]", key.GetLoadAddress(), key.GetType())
+            index += 1
+
+            self.child_list.append(child)
+
+    def has_children(self):
+        return bool(self.num_children())
+
+
+class SetCharChildrenProvider:
+    def __init__(self, value: lldb.SBValue, internalDict):
+        self.value = value
+        self.ty = self.value.target.FindFirstType("char")
+        self.child_list: list[lldb.SBValue] = []
+        self.update()
+
+    def num_children(self):
+        return len(self.child_list)
+
+    def get_child_index(self, name: str):
+        return int(name.lstrip("[").rstrip("]"))
+
+    def get_child_at_index(self, index):
+        return self.child_list[index]
+
+    def update(self):
+        self.child_list = []
+        if is_local(self.value):
+            if not is_in_scope(self.value):
+                return
+
+        cur_pos = 0
+        for child in self.value.children:
+            child_val = child.signed
+            if child_val != 0:
+                temp = child_val
+                num_bits = 8
+                while temp != 0:
+                    is_set = temp & 1
+                    if is_set == 1:
+                        data = lldb.SBData.CreateDataFromInt(cur_pos)
+                        child = self.value.synthetic_child_from_data(f"[{len(self.child_list)}]", data, self.ty)
+                        self.child_list.append(child)
+                    temp = temp >> 1
+                    cur_pos += 1
+                    num_bits -= 1
+                cur_pos += num_bits
+            else:
+                cur_pos += 8
+
+    def has_children(self):
+        return bool(self.num_children())
+
+
+def create_set_children(value: lldb.SBValue, child_type: lldb.SBType, starting_pos: int) -> list[lldb.SBValue]:
+    child_list: list[lldb.SBValue] = []
+    cur_pos = starting_pos
+
+    if value.num_children > 0:
+        children = value.children
+    else:
+        children = [value]
+
+    for child in children:
+        child_val = child.signed
+        if child_val != 0:
+            temp = child_val
+            num_bits = 8
+            while temp != 0:
+                is_set = temp & 1
+                if is_set == 1:
+                    data = lldb.SBData.CreateDataFromInt(cur_pos)
+                    child = value.synthetic_child_from_data(f"[{len(child_list)}]", data, child_type)
+                    child_list.append(child)
+                temp = temp >> 1
+                cur_pos += 1
+                num_bits -= 1
+            cur_pos += num_bits
+        else:
+            cur_pos += 8
+
+    return child_list
+
+
+class SetIntChildrenProvider:
+    def __init__(self, value: lldb.SBValue, internalDict):
+        self.value = value
+        self.ty = self.value.target.FindFirstType("NI64")
+        self.child_list: list[lldb.SBValue] = []
+        self.update()
+
+    def num_children(self):
+        return len(self.child_list)
+
+    def get_child_index(self, name: str):
+        return int(name.lstrip("[").rstrip("]"))
+
+    def get_child_at_index(self, index):
+        return self.child_list[index]
+
+    def update(self):
+        self.child_list = []
+        if is_local(self.value):
+            if not is_in_scope(self.value):
+                return
+        bits = self.value.GetByteSize() * 8
+        starting_pos = -(bits // 2)
+        self.child_list = create_set_children(self.value, self.ty, starting_pos)
+
+    def has_children(self):
+        return bool(self.num_children())
+
+
+class SetUIntChildrenProvider:
+    def __init__(self, value: lldb.SBValue, internalDict):
+        self.value = value
+        self.ty = self.value.target.FindFirstType("NU64")
+        self.child_list: list[lldb.SBValue] = []
+        self.update()
+
+    def num_children(self):
+        return len(self.child_list)
+
+    def get_child_index(self, name: str):
+        return int(name.lstrip("[").rstrip("]"))
+
+    def get_child_at_index(self, index):
+        return self.child_list[index]
+
+    def update(self):
+        self.child_list = []
+        if is_local(self.value):
+            if not is_in_scope(self.value):
+                return
+        self.child_list = create_set_children(self.value, self.ty, starting_pos=0)
+
+    def has_children(self):
+        return bool(self.num_children())
+
+
+class SetEnumChildrenProvider:
+    def __init__(self, value: lldb.SBValue, internalDict):
+        self.value = value
+        self.ty = self.value.target.FindFirstType(self.value.type.name.replace("tySet_", ""))
+        self.child_list: list[lldb.SBValue] = []
+        self.update()
+
+    def num_children(self):
+        return len(self.child_list)
+
+    def get_child_index(self, name: str):
+        return int(name.lstrip("[").rstrip("]"))
+
+    def get_child_at_index(self, index):
+        return self.child_list[index]
+
+    def update(self):
+        if is_local(self.value):
+            if not is_in_scope(self.value):
+                return
+        self.child_list = create_set_children(self.value, self.ty, starting_pos=0)
+
+    def has_children(self):
+        return bool(self.num_children())
+
+
+class TableChildrenProvider:
+    def __init__(self, value: lldb.SBValue, internalDict):
+        self.value = value
+        self.children: OrderedDict[str, int] = OrderedDict()
+        self.child_list: list[lldb.SBValue] = []
+
+        self.update()
+
+    def num_children(self):
+        return len(self.child_list)
+
+    def get_child_index(self, name: str):
+        return self.children[name]
+
+    def get_child_at_index(self, index):
+        return self.child_list[index]
+
+    def get_data(self) -> lldb.SBValue:
+        return self.value["data"]["p"]["data"] if NIM_IS_V2 else self.value["data"]["data"]
+
+    def get_len(self) -> lldb.SBValue:
+        return self.value["data"]["len"] if NIM_IS_V2 else self.value["data"]["Sup"]["len"]
+
+    def update(self):
+        self.child_list = []
+        if is_local(self.value):
+            if not is_in_scope(self.value):
+                return
+
+        tuple_len = int(self.get_len().unsigned)
+        tuple = self.get_data()
+
+        base_data_type = tuple.type.GetArrayElementType()
+
+        cast = tuple.Cast(base_data_type.GetArrayType(tuple_len))
+
+        index = 0
+        for i in range(tuple_len):
+            el = cast[i]
+            field0 = int(el[0].unsigned)
+            if field0 == 0:
+                continue
+            key = el[1]
+            val = el[2]
+            key_summary = key.summary
+            child = self.value.CreateValueFromAddress(key_summary, val.GetLoadAddress(), val.GetType())
+            self.child_list.append(child)
+            self.children[key_summary] = index
+            index += 1
+
+    def has_children(self):
+        return bool(self.num_children())
+
+
+class StringTableChildrenProvider:
+    def __init__(self, value: lldb.SBValue, internalDict):
+        self.value = value
+        self.children: OrderedDict[str, int] = OrderedDict()
+        self.child_list: list[lldb.SBValue] = []
+        self.update()
+
+    def num_children(self):
+        return len(self.child_list)
+
+    def get_child_index(self, name: str):
+        return self.children[name]
+
+    def get_child_at_index(self, index):
+        return self.child_list[index]
+
+    def get_data(self) -> lldb.SBValue:
+        return self.value["data"]["p"]["data"] if NIM_IS_V2 else self.value["data"]["data"]
+
+    def get_len(self) -> lldb.SBValue:
+        return self.value["data"]["len"] if NIM_IS_V2 else self.value["data"]["Sup"]["len"]
+
+    def update(self):
+        self.children.clear()
+        self.child_list = []
+
+        if is_local(self.value):
+            if not is_in_scope(self.value):
+                return
+
+        tuple_len = int(self.get_len().unsigned)
+        tuple = self.get_data()
+
+        base_data_type = tuple.type.GetArrayElementType()
+
+        cast = tuple.Cast(base_data_type.GetArrayType(tuple_len))
+
+        index = 0
+        for i in range(tuple_len):
+            el = cast[i]
+            field0 = int(el[2].unsigned)
+            if field0 == 0:
+                continue
+            key = el[0]
+            val = el[1]
+            child = val.CreateValueFromAddress(key.summary, val.GetLoadAddress(), val.GetType())
+            self.child_list.append(child)
+            self.children[key.summary] = index
+            index += 1
+
+        self.child_list.append(self.value["mode"])
+        self.children["mode"] = index
+
+    def has_children(self):
+        return bool(self.num_children())
+
+
+class LLDBDynamicObjectProvider:
+    def __init__(self, value: lldb.SBValue, internalDict):
+        value = value.GetNonSyntheticValue()
+        self.value: lldb.SBValue = value[0]
+        self.children: OrderedDict[str, int] = OrderedDict()
+        self.child_list: list[lldb.SBValue] = []
+
+        while self.value.type.is_pointer:
+            self.value = self.value.Dereference()
+
+        self.update()
+
+    def num_children(self):
+        return len(self.child_list)
+
+    def get_child_index(self, name: str):
+        return self.children[name]
+
+    def get_child_at_index(self, index):
+        return self.child_list[index]
+
+    def update(self):
+        self.children.clear()
+        self.child_list = []
+
+        for i, child in enumerate(self.value.children):
+            name = child.name.strip('"')
+            new_child = child.CreateValueFromAddress(name, child.GetLoadAddress(), child.GetType())
+
+            self.children[name] = i
+            self.child_list.append(new_child)
+
+    def has_children(self):
+        return bool(self.num_children())
+
+
+class LLDBBasicObjectProvider:
+    def __init__(self, value: lldb.SBValue, internalDict):
+        self.value: lldb.SBValue = value
+
+    def num_children(self):
+        if self.value is not None:
+            return self.value.num_children
+        return 0
+
+    def get_child_index(self, name: str):
+        return self.value.GetIndexOfChildWithName(name)
+
+    def get_child_at_index(self, index):
+        return self.value.GetChildAtIndex(index)
+
+    def update(self):
+        pass
+
+    def has_children(self):
+        return self.num_children() > 0
+
+
+class CustomObjectChildrenProvider:
+    """
+    This children provider handles values returned from lldbDebugSynthetic*
+    Nim procedures
+    """
+
+    def __init__(self, value: lldb.SBValue, internalDict):
+        self.value: lldb.SBValue = get_custom_synthetic(value) or value
+        if "lldbdynamicobject" in self.value.type.name.lower():
+            self.provider = LLDBDynamicObjectProvider(self.value, internalDict)
+        else:
+            self.provider = LLDBBasicObjectProvider(self.value, internalDict)
+
+    def num_children(self):
+        return self.provider.num_children()
+
+    def get_child_index(self, name: str):
+        return self.provider.get_child_index(name)
+
+    def get_child_at_index(self, index):
+        return self.provider.get_child_at_index(index)
+
+    def update(self):
+        self.provider.update()
+
+    def has_children(self):
+        return self.provider.has_children()
+
+
+def echo(debugger: lldb.SBDebugger, command: str, result, internal_dict):
+    debugger.HandleCommand("po " + command)
+
+
+SUMMARY_FUNCTIONS: dict[str, lldb.SBFunction] = {}
+SYNTHETIC_FUNCTIONS: dict[str, lldb.SBFunction] = {}
+
+
+def get_custom_summary(value: lldb.SBValue) -> Union[str, None]:
+    """Get a custom summary if a function exists for it"""
+    value = value.GetNonSyntheticValue()
+    if value.GetAddress().GetOffset() == 0:
+        return None
+
+    base_type = get_base_type(value.type)
+
+    fn = SUMMARY_FUNCTIONS.get(base_type.name)
+    if fn is None:
+        return None
+
+    fn_type: lldb.SBType = fn.type
+
+    arg_types: lldb.SBTypeList = fn_type.GetFunctionArgumentTypes()
+    first_type = arg_types.GetTypeAtIndex(0)
+
+    while value.type.is_pointer:
+        value = value.Dereference()
+
+    if first_type.is_pointer:
+        command = f"{fn.name}(({first_type.name})" + str(value.GetLoadAddress()) + ");"
+    else:
+        command = f"{fn.name}(*({first_type.GetPointerType().name})" + str(value.GetLoadAddress()) + ");"
+
+    res = executeCommand(command)
+
+    if res.error.fail:
+        return None
+
+    return res.summary.strip('"')
+
+
+def get_custom_value_summary(value: lldb.SBValue) -> Union[str, None]:
+    """Get a custom summary if a function exists for it"""
+
+    fn: lldb.SBFunction = SUMMARY_FUNCTIONS.get(value.type.name)
+    if fn is None:
+        return None
+
+    command = f"{fn.name}(({value.type.name})" + str(value.signed) + ");"
+    res = executeCommand(command)
+
+    if res.error.fail:
+        return None
+
+    return res.summary.strip('"')
+
+
+def get_custom_synthetic(value: lldb.SBValue) -> Union[lldb.SBValue, None]:
+    """Get a custom synthetic object if a function exists for it"""
+    value = value.GetNonSyntheticValue()
+    if value.GetAddress().GetOffset() == 0:
+        return None
+
+    base_type = get_base_type(value.type)
+
+    fn = SYNTHETIC_FUNCTIONS.get(base_type.name)
+    if fn is None:
+        return None
+
+    fn_type: lldb.SBType = fn.type
+
+    arg_types: lldb.SBTypeList = fn_type.GetFunctionArgumentTypes()
+    first_type = arg_types.GetTypeAtIndex(0)
+
+    while value.type.is_pointer:
+        value = value.Dereference()
+
+    if first_type.is_pointer:
+        first_arg = f"({first_type.name}){value.GetLoadAddress()}"
+    else:
+        first_arg = f"*({first_type.GetPointerType().name}){value.GetLoadAddress()}"
+
+    if arg_types.GetSize() > 1 and fn.GetArgumentName(1) == "Result":
+        ret_type = arg_types.GetTypeAtIndex(1)
+        ret_type = get_base_type(ret_type)
+
+        command = f"""
+            {ret_type.name} lldbT;
+            nimZeroMem((void*)(&lldbT), sizeof({ret_type.name}));
+            {fn.name}(({first_arg}), (&lldbT));
+            lldbT;
+        """
+    else:
+        command = f"{fn.name}({first_arg});"
+
+    res = executeCommand(command)
+
+    if res.error.fail:
+        print(res.error)
+        return None
+
+    return res
+
+
+def get_base_type(ty: lldb.SBType) -> lldb.SBType:
+    """Get the base type of the type"""
+    temp = ty
+    while temp.IsPointerType():
+        temp = temp.GetPointeeType()
+    return temp
+
+
+def use_base_type(ty: lldb.SBType) -> bool:
+    types_to_check = [
+        "NF",
+        "NF32",
+        "NF64",
+        "NI",
+        "NI8",
+        "NI16",
+        "NI32",
+        "NI64",
+        "bool",
+        "NIM_BOOL",
+        "NU",
+        "NU8",
+        "NU16",
+        "NU32",
+        "NU64",
+    ]
+
+    for type_to_check in types_to_check:
+        if ty.name.startswith(type_to_check):
+            return False
+
+    return True
+
+
+def breakpoint_function_wrapper(frame: lldb.SBFrame, bp_loc, internal_dict):
+    """This allows function calls to Nim for custom object summaries and synthetic children"""
+    debugger = lldb.debugger
+
+    global SUMMARY_FUNCTIONS
+    global SYNTHETIC_FUNCTIONS
+
+    global NIM_IS_V2
+
+    for tname, fn in SYNTHETIC_FUNCTIONS.items():
+        debugger.HandleCommand(f"type synthetic delete -w nim {tname}")
+
+    SUMMARY_FUNCTIONS = {}
+    SYNTHETIC_FUNCTIONS = {}
+
+    target: lldb.SBTarget = debugger.GetSelectedTarget()
+
+    NIM_IS_V2 = target.FindFirstType("TNimTypeV2").IsValid()
+
+    module = frame.GetSymbolContext(lldb.eSymbolContextModule).module
+
+    for sym in module:
+        if (
+            not sym.name.startswith("lldbDebugSummary")
+            and not sym.name.startswith("lldbDebugSynthetic")
+            and not sym.name.startswith("dollar___")
+        ):
+            continue
+
+        fn_syms: lldb.SBSymbolContextList = target.FindFunctions(sym.name)
+        if not fn_syms.GetSize() > 0:
+            continue
+
+        fn_sym: lldb.SBSymbolContext = fn_syms.GetContextAtIndex(0)
+
+        fn: lldb.SBFunction = fn_sym.function
+        fn_type: lldb.SBType = fn.type
+        arg_types: lldb.SBTypeList = fn_type.GetFunctionArgumentTypes()
+
+        if arg_types.GetSize() > 1 and fn.GetArgumentName(1) == "Result":
+            pass # don't continue
+        elif arg_types.GetSize() != 1:
+            continue
+
+        arg_type: lldb.SBType = arg_types.GetTypeAtIndex(0)
+        if use_base_type(arg_type):
+            arg_type = get_base_type(arg_type)
+
+        if sym.name.startswith("lldbDebugSummary") or sym.name.startswith("dollar___"):
+            SUMMARY_FUNCTIONS[arg_type.name] = fn
+        elif sym.name.startswith("lldbDebugSynthetic"):
+            SYNTHETIC_FUNCTIONS[arg_type.name] = fn
+            debugger.HandleCommand(
+                f"type synthetic add -w nim -l {__name__}.CustomObjectChildrenProvider {arg_type.name}"
+            )
+
+
+def executeCommand(command, *args):
+    debugger = lldb.debugger
+    process = debugger.GetSelectedTarget().GetProcess()
+    frame: lldb.SBFrame = process.GetSelectedThread().GetSelectedFrame()
+
+    expr_options = lldb.SBExpressionOptions()
+    expr_options.SetIgnoreBreakpoints(False)
+    expr_options.SetFetchDynamicValue(lldb.eDynamicCanRunTarget)
+    expr_options.SetTimeoutInMicroSeconds(30 * 1000 * 1000)  # 30 second timeout
+    expr_options.SetTryAllThreads(True)
+    expr_options.SetUnwindOnError(False)
+    expr_options.SetGenerateDebugInfo(True)
+    expr_options.SetLanguage(lldb.eLanguageTypeC)
+    expr_options.SetCoerceResultToId(True)
+    res = frame.EvaluateExpression(command, expr_options)
+
+    return res
+
+
+def __lldb_init_module(debugger, internal_dict):
+    # fmt: off
+    debugger.HandleCommand(f"breakpoint command add -F {__name__}.breakpoint_function_wrapper --script-type python 1")
+    debugger.HandleCommand(f"type summary add -w nim -n sequence -F  {__name__}.Sequence -x tySequence_+[[:alnum:]]+$")
+    debugger.HandleCommand(f"type synthetic add -w nim -l {__name__}.SeqChildrenProvider -x tySequence_+[[:alnum:]]+$")
+
+    debugger.HandleCommand(f"type summary add -w nim -n chararray -F  {__name__}.CharArray -x char\s+[\d+]")
+    debugger.HandleCommand(f"type summary add -w nim -n array -F  {__name__}.Array -x tyArray_+[[:alnum:]]+")
+    debugger.HandleCommand(f"type synthetic add -w nim -l {__name__}.ArrayChildrenProvider -x tyArray_+[[:alnum:]]+")
+    debugger.HandleCommand(f"type summary add -w nim -n string -F  {__name__}.NimString NimStringDesc")
+
+    debugger.HandleCommand(f"type summary add -w nim -n stringv2 -F {__name__}.NimString -x NimStringV2$")
+    debugger.HandleCommand(f"type synthetic add -w nim -l {__name__}.StringChildrenProvider -x NimStringV2$")
+    debugger.HandleCommand(f"type synthetic add -w nim -l {__name__}.StringChildrenProvider -x NimStringDesc$")
+
+    debugger.HandleCommand(f"type summary add -w nim -n cstring -F  {__name__}.NCSTRING NCSTRING")
+
+    debugger.HandleCommand(f"type summary add -w nim -n object -F  {__name__}.ObjectV2 -x tyObject_+[[:alnum:]]+_+[[:alnum:]]+")
+    debugger.HandleCommand(f"type synthetic add -w nim -l {__name__}.ObjectChildrenProvider -x tyObject_+[[:alnum:]]+_+[[:alnum:]]+$")
+
+    debugger.HandleCommand(f"type summary add -w nim -n tframe -F  {__name__}.ObjectV2 -x TFrame$")
+
+    debugger.HandleCommand(f"type summary add -w nim -n rootobj -F  {__name__}.ObjectV2 -x RootObj$")
+
+    debugger.HandleCommand(f"type summary add -w nim -n enum -F  {__name__}.Enum -x tyEnum_+[[:alnum:]]+_+[[:alnum:]]+")
+    debugger.HandleCommand(f"type summary add -w nim -n hashset -F  {__name__}.HashSet -x tyObject_+HashSet_+[[:alnum:]]+")
+    debugger.HandleCommand(f"type synthetic add -w nim -l {__name__}.HashSetChildrenProvider -x tyObject_+HashSet_+[[:alnum:]]+")
+
+    debugger.HandleCommand(f"type summary add -w nim -n rope -F  {__name__}.Rope -x tyObject_+Rope[[:alnum:]]+_+[[:alnum:]]+")
+
+    debugger.HandleCommand(f"type summary add -w nim -n setuint -F  {__name__}.Set -x tySet_+tyInt_+[[:alnum:]]+")
+    debugger.HandleCommand(f"type synthetic add -w nim -l {__name__}.SetIntChildrenProvider -x tySet_+tyInt[0-9]+_+[[:alnum:]]+")
+    debugger.HandleCommand(f"type summary add -w nim -n setint -F  {__name__}.Set -x tySet_+tyInt[0-9]+_+[[:alnum:]]+")
+    debugger.HandleCommand(f"type summary add -w nim -n setuint2 -F  {__name__}.Set -x tySet_+tyUInt[0-9]+_+[[:alnum:]]+")
+    debugger.HandleCommand(f"type synthetic add -w nim -l {__name__}.SetUIntChildrenProvider -x tySet_+tyUInt[0-9]+_+[[:alnum:]]+")
+    debugger.HandleCommand(f"type synthetic add -w nim -l {__name__}.SetUIntChildrenProvider -x tySet_+tyInt_+[[:alnum:]]+")
+    debugger.HandleCommand(f"type summary add -w nim -n setenum -F  {__name__}.EnumSet -x tySet_+tyEnum_+[[:alnum:]]+_+[[:alnum:]]+")
+    debugger.HandleCommand(f"type synthetic add -w nim -l {__name__}.SetEnumChildrenProvider -x tySet_+tyEnum_+[[:alnum:]]+_+[[:alnum:]]+")
+    debugger.HandleCommand(f"type summary add -w nim -n setchar -F  {__name__}.Set -x tySet_+tyChar_+[[:alnum:]]+")
+    debugger.HandleCommand(f"type synthetic add -w nim -l {__name__}.SetCharChildrenProvider -x tySet_+tyChar_+[[:alnum:]]+")
+    debugger.HandleCommand(f"type summary add -w nim -n table -F  {__name__}.Table -x tyObject_+Table_+[[:alnum:]]+")
+    debugger.HandleCommand(f"type synthetic add -w nim -l {__name__}.TableChildrenProvider -x tyObject_+Table_+[[:alnum:]]+")
+    debugger.HandleCommand(f"type summary add -w nim -n stringtable -F  {__name__}.StringTable -x tyObject_+StringTableObj_+[[:alnum:]]+")
+    debugger.HandleCommand(f"type synthetic add -w nim -l {__name__}.StringTableChildrenProvider -x tyObject_+StringTableObj_+[[:alnum:]]+")
+    debugger.HandleCommand(f"type summary add -w nim -n tuple2 -F  {__name__}.Tuple -x tyObject_+Tuple_+[[:alnum:]]+")
+    debugger.HandleCommand(f"type summary add -w nim -n tuple -F  {__name__}.Tuple -x tyTuple_+[[:alnum:]]+")
+
+    debugger.HandleCommand(f"type summary add -w nim -n float -F  {__name__}.Float NF")
+    debugger.HandleCommand(f"type summary add -w nim -n float32 -F  {__name__}.Float NF32")
+    debugger.HandleCommand(f"type summary add -w nim -n float64 -F  {__name__}.Float NF64")
+    debugger.HandleCommand(f"type summary add -w nim -n integer -F  {__name__}.Number -x NI")
+    debugger.HandleCommand(f"type summary add -w nim -n integer8 -F  {__name__}.Number -x NI8")
+    debugger.HandleCommand(f"type summary add -w nim -n integer16 -F  {__name__}.Number -x NI16")
+    debugger.HandleCommand(f"type summary add -w nim -n integer32 -F  {__name__}.Number -x NI32")
+    debugger.HandleCommand(f"type summary add -w nim -n integer64 -F  {__name__}.Number -x NI64")
+    debugger.HandleCommand(f"type summary add -w nim -n bool -F  {__name__}.Bool -x bool")
+    debugger.HandleCommand(f"type summary add -w nim -n bool2 -F  {__name__}.Bool -x NIM_BOOL")
+    debugger.HandleCommand(f"type summary add -w nim -n uinteger -F  {__name__}.UnsignedNumber -x NU")
+    debugger.HandleCommand(f"type summary add -w nim -n uinteger8 -F  {__name__}.UnsignedNumber -x NU8")
+    debugger.HandleCommand(f"type summary add -w nim -n uinteger16 -F  {__name__}.UnsignedNumber -x NU16")
+    debugger.HandleCommand(f"type summary add -w nim -n uinteger32 -F  {__name__}.UnsignedNumber -x NU32")
+    debugger.HandleCommand(f"type summary add -w nim -n uinteger64 -F  {__name__}.UnsignedNumber -x NU64")
+    debugger.HandleCommand("type category enable nim")
+    debugger.HandleCommand(f"command script add -f  {__name__}.echo echo")
+    # fmt: on
diff --git a/tools/detect/detect.nim b/tools/detect/detect.nim
index ed1caf78c..ed9438494 100644
--- a/tools/detect/detect.nim
+++ b/tools/detect/detect.nim
@@ -618,6 +618,7 @@ v("MAP_POPULATE", no_other = true)
 
 header("<sys/resource.h>")
 v("RLIMIT_NOFILE")
+v("RLIMIT_STACK")
 
 header("<sys/select.h>")
 v("FD_SETSIZE")
@@ -629,6 +630,7 @@ v("MSG_EOR")
 v("MSG_OOB")
 v("SCM_RIGHTS")
 v("SO_ACCEPTCONN")
+v("SO_BINDTODEVICE")
 v("SO_BROADCAST")
 v("SO_DEBUG")
 v("SO_DONTROUTE")
diff --git a/tools/dochack/dochack.nim b/tools/dochack/dochack.nim
index 4067d2ed4..946945346 100644
--- a/tools/dochack/dochack.nim
+++ b/tools/dochack/dochack.nim
@@ -1,5 +1,6 @@
 import dom
 import fuzzysearch
+import std/[jsfetch, asyncjs]
 
 
 proc setTheme(theme: cstring) {.exportc.} =
@@ -252,21 +253,7 @@ proc escapeCString(x: var cstring) =
 
 proc dosearch(value: cstring): Element =
   if db.len == 0:
-    var stuff: Element
-    {.emit: """
-    var request = new XMLHttpRequest();
-    request.open("GET", document.getElementById("indexLink").href, false);
-    request.send(null);
-
-    var doc = document.implementation.createHTMLDocument("theindex");
-    doc.documentElement.innerHTML = request.responseText;
-
-    `stuff` = doc.documentElement;
-    """.}
-    db = stuff.getElementsByClass"reference"
-    contents = @[]
-    for ahref in db:
-      contents.add ahref.getAttribute("data-doc-search-tag")
+    return
   let ul = tree("UL")
   result = tree("DIV")
   result.setClass"search_results"
@@ -293,8 +280,28 @@ proc dosearch(value: cstring): Element =
     result.add tree("B", text"search results")
     result.add ul
 
-var oldtoc: Element
-var timer: Timeout
+proc loadIndex() {.async.} =
+  ## Loads theindex.html to enable searching
+  let
+    indexURL = document.getElementById("indexLink").getAttribute("href")
+    # Get root of project documentation by cutting off theindex.html from index href
+    rootURL = ($indexURL)[0 ..< ^"theindex.html".len]
+  var resp = fetch(indexURL).await().text().await()
+  # Convert into element so we can use DOM functions to parse the html
+  var indexElem = document.createElement("div")
+  indexElem.innerHtml = resp
+  # Add items into the DB/contents
+  for href in indexElem.getElementsByClass("reference"):
+    # Make links relative to the project root instead of the current page
+    href.setAttr("href", cstring(rootURL & $href.getAttribute("href")))
+    db &= href
+    contents &= href.getAttribute("data-doc-search-tag")
+
+
+var
+  oldtoc: Element
+  timer: Timeout
+  loadIndexFut: Future[void] = nil
 
 proc search*() {.exportc.} =
   proc wrapper() =
@@ -307,7 +314,12 @@ proc search*() {.exportc.} =
       replaceById("tocRoot", results)
     elif not oldtoc.isNil:
       replaceById("tocRoot", oldtoc)
-
+  # Start loading the index as soon as the user starts typing.
+  # It will only be loaded once anyway.
+  if loadIndexFut == nil:
+    loadIndexFut = loadIndex()
+    # Run wrapper once loaded so we don't miss the user's query
+    discard loadIndexFut.then(wrapper)
   if timer != nil: clearTimeout(timer)
   timer = setTimeout(wrapper, 400)
 
@@ -316,8 +328,8 @@ proc copyToClipboard*() {.exportc.} =
 
     function updatePreTags() {
 
-      const allPreTags = document.querySelectorAll("pre")
-    
+      const allPreTags = document.querySelectorAll("pre:not(.line-nums)")
+
       allPreTags.forEach((e) => {
       
           const div = document.createElement("div")
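The dochack change above replaces the synchronous XMLHttpRequest with a lazily started async load of `theindex.html`. Below is a minimal sketch of that lazy-load pattern, distilled from the code above (JS backend only; the proc and variable names `loadIndexOnce`, `onSearchInput`, `indexHtml` and the literal URL are illustrative):

```nim
import std/[jsfetch, asyncjs]

var indexHtml: cstring
var loadFut: Future[void] = nil

proc loadIndexOnce(): Future[void] {.async.} =
  # Fetch the index a single time; later calls reuse the same future.
  indexHtml = fetch(cstring"theindex.html").await().text().await()

proc onSearchInput(handler: proc ()) =
  if loadFut == nil:
    loadFut = loadIndexOnce()
    # Re-run the handler once the index has arrived so the first query isn't lost.
    discard loadFut.then(handler)
  handler()
```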
diff --git a/tools/grammar_nanny.nim b/tools/grammar_nanny.nim
index bcb3a044f..cbdc51efc 100644
--- a/tools/grammar_nanny.nim
+++ b/tools/grammar_nanny.nim
@@ -22,7 +22,6 @@ proc checkGrammarFileImpl(cache: IdentCache, config: ConfigRef) =
     var
       L: Lexer
       tok: Token
-    initToken(tok)
     openLexer(L, f, stream, cache, config)
     # load the first token:
     rawGetTok(L, tok)
diff --git a/tools/kochdocs.nim b/tools/kochdocs.nim
index 9d6e37cab..477fb29fa 100644
--- a/tools/kochdocs.nim
+++ b/tools/kochdocs.nim
@@ -1,13 +1,13 @@
 ## Part of 'koch' responsible for the documentation generation.
 
-import std/[os, strutils, osproc, sets, pathnorm, sequtils]
+import std/[os, strutils, osproc, sets, pathnorm, sequtils, pegs]
+
+import officialpackages
+export exec
 
 when defined(nimPreviewSlimSystem):
   import std/assertions
 
-# XXX: Remove this feature check once the csources supports it.
-when defined(nimHasCastPragmaBlocks):
-  import std/pegs
 from std/private/globs import nativeToUnixPath, walkDirRecFilter, PathEntry
 import "../compiler/nimpaths"
 
@@ -48,18 +48,6 @@ proc findNimImpl*(): tuple[path: string, ok: bool] =
 
 proc findNim*(): string = findNimImpl().path
 
-proc exec*(cmd: string, errorcode: int = QuitFailure, additionalPath = "") =
-  let prevPath = getEnv("PATH")
-  if additionalPath.len > 0:
-    var absolute = additionalPath
-    if not absolute.isAbsolute:
-      absolute = getCurrentDir() / absolute
-    echo("Adding to $PATH: ", absolute)
-    putEnv("PATH", (if prevPath.len > 0: prevPath & PathSep else: "") & absolute)
-  echo(cmd)
-  if execShellCmd(cmd) != 0: quit("FAILURE", errorcode)
-  putEnv("PATH", prevPath)
-
 template inFold*(desc, body) =
   if existsEnv("GITHUB_ACTIONS"):
     echo "::group::" & desc
@@ -105,17 +93,22 @@ proc nimCompileFold*(desc, input: string, outputDir = "bin", mode = "c", options
   let cmd = findNim().quoteShell() & " " & mode & " -o:" & output & " " & options & " " & input
   execFold(desc, cmd)
 
+const officialPackagesMarkdown = """
+pkgs/atlas/doc/atlas.md
+""".splitWhitespace()
+
 proc getMd2html(): seq[string] =
   for a in walkDirRecFilter("doc"):
     let path = a.path
     if a.kind == pcFile and path.splitFile.ext == ".md" and path.lastPathPart notin
-        ["docs.md", "nimfix.md",
+        ["docs.md",
          "docstyle.md" # docstyle.md shouldn't be converted to html separately;
                        # it's included in contributing.md.
         ]:
-          # maybe we should still show nimfix, could help reviving it
           # `docs` is redundant with `overview`, might as well remove that file?
       result.add path
+  for md in officialPackagesMarkdown:
+    result.add md
   doAssert "doc/manual/var_t_return.md".unixToNativePath in result # sanity check
 
 const
@@ -131,11 +124,7 @@ mm.md
 """.splitWhitespace().mapIt("doc" / it)
 
   withoutIndex = """
-lib/wrappers/mysql.nim
-lib/wrappers/sqlite3.nim
-lib/wrappers/postgres.nim
 lib/wrappers/tinyc.nim
-lib/wrappers/odbcsql.nim
 lib/wrappers/pcre.nim
 lib/wrappers/openssl.nim
 lib/posix/posix.nim
@@ -163,6 +152,34 @@ lib/posix/posix_other_consts.nim
 lib/posix/posix_freertos_consts.nim
 lib/posix/posix_openbsd_amd64.nim
 lib/posix/posix_haiku.nim
+lib/pure/md5.nim
+lib/std/sha1.nim
+lib/pure/htmlparser.nim
+""".splitWhitespace()
+
+  officialPackagesList = """
+pkgs/asyncftpclient/src/asyncftpclient.nim
+pkgs/smtp/src/smtp.nim
+pkgs/punycode/src/punycode.nim
+pkgs/db_connector/src/db_connector/db_common.nim
+pkgs/db_connector/src/db_connector/db_mysql.nim
+pkgs/db_connector/src/db_connector/db_odbc.nim
+pkgs/db_connector/src/db_connector/db_postgres.nim
+pkgs/db_connector/src/db_connector/db_sqlite.nim
+pkgs/checksums/src/checksums/md5.nim
+pkgs/checksums/src/checksums/sha1.nim
+pkgs/checksums/src/checksums/sha2.nim
+pkgs/checksums/src/checksums/sha3.nim
+pkgs/checksums/src/checksums/bcrypt.nim
+pkgs/htmlparser/src/htmlparser.nim
+""".splitWhitespace()
+
+  officialPackagesListWithoutIndex = """
+pkgs/db_connector/src/db_connector/mysql.nim
+pkgs/db_connector/src/db_connector/sqlite3.nim
+pkgs/db_connector/src/db_connector/postgres.nim
+pkgs/db_connector/src/db_connector/odbcsql.nim
+pkgs/db_connector/src/db_connector/private/dbutils.nim
 """.splitWhitespace()
 
 when (NimMajor, NimMinor) < (1, 1) or not declared(isRelativeTo):
@@ -173,6 +190,7 @@ when (NimMajor, NimMinor) < (1, 1) or not declared(isRelativeTo):
     result = path.len > 0 and not ret.startsWith ".."
 
 proc getDocList(): seq[string] =
+  ## Returns the list of modules to generate documentation for, excluding ignored ones.
   var docIgnore: HashSet[string]
   for a in withoutIndex: docIgnore.incl a
   for a in ignoredModules: docIgnore.incl a
@@ -224,44 +242,48 @@ proc buildDocSamples(nimArgs, destPath: string) =
   exec(findNim().quoteShell() & " doc $# -o:$# $#" %
     [nimArgs, destPath / "docgen_sample.html", "doc" / "docgen_sample.nim"])
 
-proc buildDocPackages(nimArgs, destPath: string) =
+proc buildDocPackages(nimArgs, destPath: string, indexOnly: bool) =
   # compiler docs; later, other packages (perhaps tools, testament etc)
   let nim = findNim().quoteShell()
     # to avoid broken links to manual from compiler dir, but a multi-package
     # structure could be supported later
 
   proc docProject(outdir, options, mainproj: string) =
-    exec("$nim doc --project --outdir:$outdir $nimArgs --git.url:$gitUrl $options $mainproj" % [
+    exec("$nim doc --project --outdir:$outdir $nimArgs --git.url:$gitUrl $index $options $mainproj" % [
       "nim", nim,
       "outdir", outdir,
       "nimArgs", nimArgs,
       "gitUrl", gitUrl,
       "options", options,
       "mainproj", mainproj,
+      "index", if indexOnly: "--index:only" else: ""
       ])
   let extra = "-u:boot"
   # xxx keep in sync with what's in $nim_prs_D/config/nimdoc.cfg, or, rather,
   # start using nims instead of nimdoc.cfg
   docProject(destPath/"compiler", extra, "compiler/index.nim")
 
-proc buildDoc(nimArgs, destPath: string) =
+proc buildDoc(nimArgs, destPath: string, indexOnly: bool) =
   # call nim for the documentation:
   let rst2html = getMd2html()
   var
-    commands = newSeq[string](rst2html.len + len(doc) + withoutIndex.len)
+    commands = newSeq[string](rst2html.len + len(doc) + withoutIndex.len +
+              officialPackagesList.len + officialPackagesListWithoutIndex.len)
     i = 0
   let nim = findNim().quoteShell()
+
+  let index = if indexOnly: "--index:only" else: ""
   for d in items(rst2html):
-    commands[i] = nim & " md2html $# --git.url:$# -o:$# --index:on $#" %
+    commands[i] = nim & " md2html $# --git.url:$# -o:$# $# $#" %
       [nimArgs, gitUrl,
-      destPath / changeFileExt(splitFile(d).name, "html"), d]
+      destPath / changeFileExt(splitFile(d).name, "html"), index, d]
     i.inc
   for d in items(doc):
     let extra = if isJsOnly(d): "--backend:js" else: ""
     var nimArgs2 = nimArgs
     if d.isRelativeTo("compiler"): doAssert false
-    commands[i] = nim & " doc $# $# --git.url:$# --outdir:$# --index:on $#" %
-      [extra, nimArgs2, gitUrl, destPath, d]
+    commands[i] = nim & " doc $# $# --git.url:$# --outdir:$# $# $#" %
+      [extra, nimArgs2, gitUrl, destPath, index, d]
     i.inc
   for d in items(withoutIndex):
     commands[i] = nim & " doc $# --git.url:$# -o:$# $#" %
@@ -269,13 +291,20 @@ proc buildDoc(nimArgs, destPath: string) =
       destPath / changeFileExt(splitFile(d).name, "html"), d]
     i.inc
 
+
+  for d in items(officialPackagesList):
+    var nimArgs2 = nimArgs
+    if d.isRelativeTo("compiler"): doAssert false
+    commands[i] = nim & " doc $# --outdir:$# --index:on $#" %
+      [nimArgs2, destPath, d]
+    i.inc
+  for d in items(officialPackagesListWithoutIndex):
+    commands[i] = nim & " doc $# -o:$# $#" %
+      [nimArgs,
+      destPath / changeFileExt(splitFile(d).name, "html"), d]
+    i.inc
+
   mexec(commands)
-  exec(nim & " buildIndex -o:$1/theindex.html $1" % [destPath])
-    # caveat: this works so long it's called before `buildDocPackages` which
-    # populates `compiler/` with unrelated idx files that shouldn't be in index,
-    # so should work in CI but you may need to remove your generated html files
-    # locally after calling `./koch docs`. The clean fix would be for `idx` files
-    # to be transient with `--project` (eg all in memory).
 
 proc nim2pdf(src: string, dst: string, nimArgs: string) =
   # xxx expose as a `nim` command or in some other reusable way.
@@ -296,7 +325,8 @@ proc nim2pdf(src: string, dst: string, nimArgs: string) =
       exec(cmd)
   moveFile(texFile.changeFileExt("pdf"), dst)
 
-proc buildPdfDoc*(nimArgs, destPath: string) =
+proc buildPdfDoc*(args: string, destPath: string) =
+  let args = nimArgs & " " & args
   var pdfList: seq[string]
   createDir(destPath)
   if os.execShellCmd("xelatex -version") != 0:
@@ -305,7 +335,7 @@ proc buildPdfDoc*(nimArgs, destPath: string) =
     for src in items(mdPdfList):
       let dst = destPath / src.lastPathPart.changeFileExt("pdf")
       pdfList.add dst
-      nim2pdf(src, dst, nimArgs)
+      nim2pdf(src, dst, args)
   echo "\nOutput PDF files: \n  ", pdfList.join(" ") # because `nim2pdf` is a bit verbose
 
 proc buildJS(): string =
@@ -318,11 +348,26 @@ proc buildJS(): string =
 proc buildDocsDir*(args: string, dir: string) =
   let args = nimArgs & " " & args
   let docHackJsSource = buildJS()
+  gitClonePackages(@["asyncftpclient", "punycode", "smtp", "db_connector", "checksums", "atlas", "htmlparser"])
   createDir(dir)
   buildDocSamples(args, dir)
-  buildDoc(args, dir) # bottleneck
+
+  # generate `.idx` files and top-level `theindex.html`:
+  buildDoc(args, dir, indexOnly=true) # bottleneck
+  let nim = findNim().quoteShell()
+  exec(nim & " buildIndex -o:$1/theindex.html $1" % [dir])
+    # caveat: this works as long as it's called before `buildDocPackages`, which
+    # populates `compiler/` with unrelated idx files that shouldn't be in the index,
+    # so it should work in CI, but you may need to remove your generated html files
+    # locally after calling `./koch docs`. The clean fix would be for `idx` files
+    # to be transient with `--project` (e.g. all in memory).
+  buildDocPackages(args, dir, indexOnly=true)
+
+  # generate HTML and package-level `theindex.html`:
+  buildDoc(args, dir, indexOnly=false) # bottleneck
+  buildDocPackages(args, dir, indexOnly=false)
+
   copyFile(dir / "overview.html", dir / "index.html")
-  buildDocPackages(args, dir)
   copyFile(docHackJsSource, dir / docHackJsSource.lastPathPart)
 
 proc buildDocs*(args: string, localOnly = false, localOutDir = "") =
@@ -337,9 +382,7 @@ proc buildDocs*(args: string, localOnly = false, localOutDir = "") =
   if not localOnly:
     buildDocsDir(args, webUploadOutput / NimVersion)
 
-    # XXX: Remove this feature check once the csources supports it.
-    when defined(nimHasCastPragmaBlocks):
-      let gaFilter = peg"@( y'--doc.googleAnalytics:' @(\s / $) )"
-      args = args.replace(gaFilter)
+    let gaFilter = peg"@( y'--doc.googleAnalytics:' @(\s / $) )"
+    args = args.replace(gaFilter)
 
   buildDocsDir(args, localOutDir)
diff --git a/tools/nimgrab.nim b/tools/nimgrab.nim
index 7e4161faf..c86159739 100644
--- a/tools/nimgrab.nim
+++ b/tools/nimgrab.nim
@@ -1,13 +1,20 @@
 import std/[os, httpclient]
 
 proc syncDownload(url, file: string) =
-  var client = newHttpClient()
+  let client = newHttpClient()
   proc onProgressChanged(total, progress, speed: BiggestInt) =
-    echo "Downloading " & url & " " & $(speed div 1000) & "kb/s"
-    echo clamp(int(progress*100 div total), 0, 100), "%"
+    var message = "Downloading "
+    message.add url
+    message.add ' '
+    message.addInt speed div 1000
+    message.add "kb/s\n"
+    message.add $clamp(int(progress * 100 div total), 0, 100)
+    message.add '%'
+    echo message
 
   client.onProgressChanged = onProgressChanged
   client.downloadFile(url, file)
+  client.close()
   echo "100%"
 
 if os.paramCount() != 2:
diff --git a/tools/nimgrep.nim.cfg b/tools/nimgrep.nim.cfg
deleted file mode 100644
index 64d3edc7a..000000000
--- a/tools/nimgrep.nim.cfg
+++ /dev/null
@@ -1,4 +0,0 @@
-# don't use --gc:refc because of bug
-# https://github.com/nim-lang/Nim/issues/14138 .
-# --gc:orc and --gc:markandsweep work well.
---threads:on --gc:orc
diff --git a/tools/niminst/buildsh.nimf b/tools/niminst/buildsh.nimf
index 6b99c49ee..063a02779 100644
--- a/tools/niminst/buildsh.nimf
+++ b/tools/niminst/buildsh.nimf
@@ -122,6 +122,7 @@ case $uos in
   *netbsd* )
     myos="netbsd"
     LINK_FLAGS="$LINK_FLAGS -lm"
+    ucpu=`uname -p`
     ;;
   *darwin* )
     myos="macosx"
diff --git a/tools/niminst/makefile.nimf b/tools/niminst/makefile.nimf
index 2fe89ed69..002bc0592 100644
--- a/tools/niminst/makefile.nimf
+++ b/tools/niminst/makefile.nimf
@@ -45,6 +45,7 @@ endif
 ifeq ($(uos),netbsd)
   myos = netbsd
   LDFLAGS += -lm
+  ucpu = $(shell sh -c 'uname -p')
 endif
 ifeq ($(uos),darwin)
   myos = macosx
@@ -173,6 +174,9 @@ endif
 ifeq ($(ucpu),aarch64)
   mycpu = arm64
 endif
+ifeq ($(ucpu),arm64)
+  mycpu = arm64
+endif
 ifeq ($(ucpu),riscv64)
   mycpu = riscv64
 endif
diff --git a/tools/niminst/niminst.nim b/tools/niminst/niminst.nim
index cd2e5a481..40ee79814 100644
--- a/tools/niminst/niminst.nim
+++ b/tools/niminst/niminst.nim
@@ -8,9 +8,9 @@
 #
 
 import
-  os, strutils, parseopt, parsecfg, strtabs, streams, debcreation,
-  std / sha1
+  os, strutils, parseopt, parsecfg, strtabs, streams, debcreation
 
+import ../../dist/checksums/src/checksums/sha1
 
 when defined(nimPreviewSlimSystem):
   import std/syncio
@@ -168,11 +168,11 @@ proc parseCmdLine(c: var ConfigData) =
     next(p)
     var kind = p.kind
     var key = p.key
-    var val = p.val.string
+    var val = p.val
     case kind
     of cmdArgument:
       if c.actions == {}:
-        for a in split(normalize(key.string), {';', ','}):
+        for a in split(normalize(key), {';', ','}):
           case a
           of "csource": incl(c.actions, actionCSource)
           of "scripts": incl(c.actions, actionScripts)
@@ -183,11 +183,11 @@ proc parseCmdLine(c: var ConfigData) =
           of "deb": incl(c.actions, actionDeb)
           else: quit(Usage)
       else:
-        c.infile = addFileExt(key.string, "ini")
-        c.nimArgs = cmdLineRest(p).string
+        c.infile = addFileExt(key, "ini")
+        c.nimArgs = cmdLineRest(p)
         break
     of cmdLongOption, cmdShortOption:
-      case normalize(key.string)
+      case normalize(key)
       of "help", "h":
         stdout.write(Usage)
         quit(0)
@@ -515,6 +515,17 @@ template gatherFiles(fun, libpath, outDir) =
     # commenting out for now, see discussion in https://github.com/nim-lang/Nim/pull/13413
     # copySrc(libpath / "lib/wrappers/linenoise/linenoise.h")
 
+proc exe(f: string): string =
+  result = addFileExt(f, ExeExt)
+  when defined(windows):
+    result = result.replace('/','\\')
+
+proc findNim(): string =
+  let nim = "nim".exe
+  result = quoteShell("bin" / nim)
+  if not fileExists(result):
+    result = "nim"
+
 proc srcdist(c: var ConfigData) =
   let cCodeDir = getOutputDir(c) / "c_code"
   if not dirExists(cCodeDir): createDir(cCodeDir)
@@ -533,10 +544,10 @@ proc srcdist(c: var ConfigData) =
       var dir = getOutputDir(c) / buildDir(osA, cpuA)
       if dirExists(dir): removeDir(dir)
       createDir(dir)
-      var cmd = ("nim compile -f --symbolfiles:off --compileonly " &
+      var cmd = ("$# compile -f --incremental:off --compileonly " &
                  "--gen_mapping --cc:gcc --skipUserCfg" &
                  " --os:$# --cpu:$# $# $#") %
-                 [osname, cpuname, c.nimArgs, c.mainfile]
+                 [findNim(), osname, cpuname, c.nimArgs, c.mainfile]
       echo(cmd)
       if execShellCmd(cmd) != 0:
         quit("Error: call to nim compiler failed")
diff --git a/tools/officialpackages.nim b/tools/officialpackages.nim
new file mode 100644
index 000000000..633944a14
--- /dev/null
+++ b/tools/officialpackages.nim
@@ -0,0 +1,21 @@
+import std/[strformat, paths, dirs, envvars]
+from std/os import execShellCmd
+
+proc exec*(cmd: string, errorcode: int = QuitFailure, additionalPath = "") =
+  let prevPath = getEnv("PATH")
+  if additionalPath.len > 0:
+    var absolute = Path(additionalPath)
+    if not absolute.isAbsolute:
+      absolute = getCurrentDir() / absolute
+    echo("Adding to $PATH: ", string(absolute))
+    putEnv("PATH", (if prevPath.len > 0: prevPath & PathSep else: "") & string(absolute))
+  echo(cmd)
+  if execShellCmd(cmd) != 0: quit("FAILURE", errorcode)
+  putEnv("PATH", prevPath)
+
+proc gitClonePackages*(names: seq[string]) =
+  if not dirExists(Path"pkgs"):
+    createDir(Path"pkgs")
+  for name in names:
+    if not dirExists(Path"pkgs" / Path(name)):
+      exec fmt"git clone https://github.com/nim-lang/{name} pkgs/{name}"
diff --git a/tools/unicode_parsedata.nim b/tools/unicode_parsedata.nim
index cca377f51..bd12998d1 100644
--- a/tools/unicode_parsedata.nim
+++ b/tools/unicode_parsedata.nim
@@ -26,34 +26,54 @@ var
 
 
 proc parseData(data: seq[string]) =
-  for line in data:
-    let
-      fields = line.split(';')
-      code = fields[0].parseHexInt()
-      category = fields[2]
-      uc = fields[12]
-      lc = fields[13]
-      tc = fields[14]
-
+  proc doAdd(firstCode, lastCode: int, category, uc, lc, tc: string) =
     if category notin spaces and category notin letters:
-      continue
+      return
 
+    if firstCode != lastCode:
+      doAssert uc == "" and lc == "" and tc == ""
     if uc.len > 0:
-      let diff = 500 + uc.parseHexInt() - code
-      toUpper.add (code, diff)
+      let diff = 500 + uc.parseHexInt() - firstCode
+      toUpper.add (firstCode, diff)
     if lc.len > 0:
-      let diff = 500 + lc.parseHexInt() - code
-      toLower.add (code, diff)
+      let diff = 500 + lc.parseHexInt() - firstCode
+      toLower.add (firstCode, diff)
     if tc.len > 0 and tc != uc:
       # if titlecase is different than uppercase
-      let diff = 500 + tc.parseHexInt() - code
+      let diff = 500 + tc.parseHexInt() - firstCode
       if diff != 500:
-        toTitle.add (code, diff)
+        toTitle.add (firstCode, diff)
 
-    if category in spaces:
-      unispaces.add code
+    for code in firstCode..lastCode:
+      if category in spaces:
+        unispaces.add code
+      else:
+        alphas.add code
+
+  var idx = 0
+  while idx < data.len:
+    let
+      line = data[idx]
+      fields = line.split(';')
+      code = fields[0].parseHexInt()
+      name = fields[1]
+      category = fields[2]
+      uc = fields[12]
+      lc = fields[13]
+      tc = fields[14]
+    inc(idx)
+    if name.endsWith(", First>"):
+      doAssert idx < data.len
+      let
+        nextLine = data[idx]
+        nextFields = nextLine.split(';')
+        nextCode = nextFields[0].parseHexInt()
+        nextName = nextFields[1]
+      inc(idx)
+      doAssert nextName.endsWith(", Last>")
+      doAdd(code, nextCode, category, uc, lc, tc)
     else:
-      alphas.add code
+      doAdd(code, code, category, uc, lc, tc)
 
 proc splitRanges(a: seq[Singlets], r: var seq[Ranges], s: var seq[Singlets]) =
   ## Splits `toLower`, `toUpper` and `toTitle` into separate sequences:
@@ -153,18 +173,18 @@ proc createHeader(output: var string) =
 
 proc `$`(r: Ranges): string =
   let
-    start = "0x" & toHex(r.start, 5)
-    stop = "0x" & toHex(r.stop, 5)
+    start = "0x" & toHex(r.start, 5) & "'i32"
+    stop = "0x" & toHex(r.stop, 5) & "'i32"
   result = "$#, $#, $#,\n" % [start, stop, $r.diff]
 
 proc `$`(r: Singlets): string =
-  let code = "0x" & toHex(r.code, 5)
+  let code = "0x" & toHex(r.code, 5) & "'i32"
   result = "$#, $#,\n" % [code, $r.diff]
 
 proc `$`(r: NonLetterRanges): string =
   let
-    start = "0x" & toHex(r.start, 5)
-    stop = "0x" & toHex(r.stop, 5)
+    start = "0x" & toHex(r.start, 5) & "'i32"
+    stop = "0x" & toHex(r.stop, 5) & "'i32"
   result = "$#, $#,\n" % [start, stop]
 
 
@@ -178,7 +198,7 @@ proc outputSeq(s: seq[Ranges|Singlets|NonLetterRanges], name: string,
 proc outputSeq(s: seq[int], name: string, output: var string) =
   output.add "  $# = [\n" % name
   for i in s:
-    output.add "    0x$#,\n" % toHex(i, 5)
+    output.add "    0x$#'i32,\n" % toHex(i, 5)
   output.add "  ]\n\n"
 
 proc outputSpaces(s: seq[int], name: string, output: var string) =
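For context, the `<..., First>` / `<..., Last>` handling added above deals with UnicodeData.txt entries that encode whole ranges as a pair of consecutive lines. A self-contained sketch of that pairing logic (the two sample lines follow the real file format, but treat the exact code points as illustrative):

```nim
import std/strutils

# A range is encoded as a "<..., First>" entry immediately followed by its "<..., Last>" entry.
let data = @[
  "4E00;<CJK Ideograph, First>;Lo;0;L;;;;;N;;;;;",
  "9FFF;<CJK Ideograph, Last>;Lo;0;L;;;;;N;;;;;",
]

var idx = 0
while idx < data.len:
  let fields = data[idx].split(';')
  inc idx
  if fields[1].endsWith(", First>"):
    # The next line must be the matching "<..., Last>" entry for the same range.
    let nextFields = data[idx].split(';')
    inc idx
    doAssert nextFields[1].endsWith(", Last>")
    echo "range ", fields[0], "..", nextFields[0], " (", fields[2], ")"
  else:
    echo "single ", fields[0], " (", fields[2], ")"
```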
diff --git a/tools/vccexe/vccexe.nim b/tools/vccexe/vccexe.nim
index abe68c0a0..2a43f7422 100644
--- a/tools/vccexe/vccexe.nim
+++ b/tools/vccexe/vccexe.nim
@@ -41,6 +41,7 @@ const
   platformPrefix = "--platform"
   sdktypePrefix = "--sdktype"
   sdkversionPrefix = "--sdkversion"
+  vctoolsetPrefix = "--vctoolset"
   verbosePrefix = "--verbose"
 
   vccversionSepIdx = vccversionPrefix.len
@@ -49,6 +50,7 @@ const
   platformSepIdx = platformPrefix.len
   sdktypeSepIdx = sdktypePrefix.len
   sdkversionSepIdx = sdkversionPrefix.len
+  vctoolsetSepIdx = vctoolsetPrefix.len
 
   vcvarsallDefaultPath = "vcvarsall.bat"
 
@@ -97,6 +99,8 @@ Options:
                       "8.1" to use the windows 8.1 SDK
   --verbose           Echoes the command line for loading the Developer Command Prompt
                       and the command line passed on to the secondary command.
+  --vctoolset         Optionally specifies the Visual Studio compiler toolset to use. 
+                      By default, the environment is set to use the current Visual Studio compiler toolset.
 
 Other command line arguments are passed on to the
 secondary command specified by --command or to the
@@ -108,7 +112,7 @@ proc parseVccexeCmdLine(argseq: seq[string],
     vccversionArg: var seq[string], printPathArg: var bool,
     vcvarsallArg: var string, commandArg: var string, noCommandArg: var bool,
     platformArg: var VccArch, sdkTypeArg: var VccPlatformType,
-    sdkVersionArg: var string, verboseArg: var bool,
+    sdkVersionArg: var string, vctoolsetArg: var string, verboseArg: var bool,
     clArgs: var seq[string]) =
   ## Cannot use usual command-line argument parser here
   ## Since vccexe command-line arguments are intermingled
@@ -125,7 +129,7 @@ proc parseVccexeCmdLine(argseq: seq[string],
         responseargs = parseCmdLine(responsecontent)
       parseVccexeCmdLine(responseargs, vccversionArg, printPathArg,
         vcvarsallArg, commandArg, noCommandArg, platformArg, sdkTypeArg,
-        sdkVersionArg, verboseArg, clArgs)
+        sdkVersionArg, vctoolsetArg, verboseArg, clArgs)
     elif wargv.startsWith(vccversionPrefix): # Check for vccversion
       vccversionArg.add(wargv.substr(vccversionSepIdx + 1))
     elif wargv.cmpIgnoreCase(printPathPrefix) == 0: # Check for printPath
@@ -142,6 +146,8 @@ proc parseVccexeCmdLine(argseq: seq[string],
       sdkTypeArg = parseEnum[VccPlatformType](wargv.substr(sdktypeSepIdx + 1))
     elif wargv.startsWith(sdkversionPrefix): # Check for sdkversion
       sdkVersionArg = wargv.substr(sdkversionSepIdx + 1)
+    elif wargv.startsWith(vctoolsetPrefix): # Check for vctoolset
+      vctoolsetArg = wargv.substr(vctoolsetSepIdx + 1)
     elif wargv.startsWith(verbosePrefix):
       verboseArg = true
     else: # Regular cl.exe argument -> store for final cl.exe invocation
@@ -158,13 +164,14 @@ when isMainModule:
   var platformArg: VccArch
   var sdkTypeArg: VccPlatformType
   var sdkVersionArg: string
+  var vctoolsetArg: string
   var verboseArg: bool = false
 
   var clArgs: seq[string] = @[]
 
   let wrapperArgs = commandLineParams()
   parseVccexeCmdLine(wrapperArgs, vccversionArg, printPathArg, vcvarsallArg,
-    commandArg, noCommandArg, platformArg, sdkTypeArg, sdkVersionArg,
+    commandArg, noCommandArg, platformArg, sdkTypeArg, sdkVersionArg, vctoolsetArg,
     verboseArg,
     clArgs)
 
@@ -195,7 +202,7 @@ when isMainModule:
     echo "$1: $2" % [head, vcvarsallArg]
 
   # Call vcvarsall to get the appropriate VCC process environment
-  var vcvars = vccVarsAll(vcvarsallArg, platformArg, sdkTypeArg, sdkVersionArg, verboseArg)
+  var vcvars = vccVarsAll(vcvarsallArg, platformArg, sdkTypeArg, sdkVersionArg, vctoolsetArg, verboseArg)
   if vcvars != nil:
     for vccEnvKey, vccEnvVal in vcvars:
       putEnv(vccEnvKey, vccEnvVal)
@@ -204,6 +211,11 @@ when isMainModule:
   if verboseArg:
     vccOptions.incl poEchoCmd
 
+  let currentDir = getCurrentDir()
+  for arg in clArgs.mitems:
+    if fileExists(arg):
+      arg = relativePath(arg, currentDir)
+
   # Default to the cl.exe command if no secondary command was specified
   if commandArg.len < 1:
     commandArg = "cl.exe"
diff --git a/tools/vccexe/vcvarsall.nim b/tools/vccexe/vcvarsall.nim
index 29d13cc7e..73b103e3c 100644
--- a/tools/vccexe/vcvarsall.nim
+++ b/tools/vccexe/vcvarsall.nim
@@ -33,7 +33,7 @@ type
     vccplatUWP = "uwp", ## Universal Windows Platform (UWP) Application
     vccplatOneCore = "onecore" # Undocumented platform type in the Windows SDK, probably XBox One SDK platform type.
 
-proc vccVarsAll*(path: string, arch: VccArch = vccarchUnspecified, platform_type: VccPlatformType = vccplatEmpty, sdk_version: string = "", verbose: bool = false): StringTableRef =
+proc vccVarsAll*(path: string, arch: VccArch = vccarchUnspecified, platform_type: VccPlatformType = vccplatEmpty, sdk_version, vctoolset: string = "", verbose: bool = false): StringTableRef =
   ## Returns a string table containing the proper process environment to successfully execute VCC compile commands for the specified SDK version, CPU architecture and platform type.
   ##
   ## path
@@ -44,6 +44,8 @@ proc vccVarsAll*(path: string, arch: VccArch = vccarchUnspecified, platform_type
   ##   The compile target Platform Type. Defaults to the Windows Desktop platform, i.e. a regular Windows executable binary.
   ## sdk_version
   ##   The Windows SDK version to use.
+  ## vctoolset
+  ##   The Visual Studio compiler toolset to use.
   ## verbose
   ##   Echo the command-line passed on to the system to load the VCC environment. Defaults to `false`.
 
@@ -63,6 +65,9 @@ proc vccVarsAll*(path: string, arch: VccArch = vccarchUnspecified, platform_type
 
   if sdk_version.len > 0:
     args.add(sdk_version)
+
+  if vctoolset.len > 0:
+    args.add("-vcvars_ver=" & vctoolset)
 
   let argStr = args.join " "
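A hedged usage sketch of the extended `vccVarsAll` signature, mirroring the call site in vccexe.nim above; the `vcvarsall.bat` path and the `14.29` toolset version are illustrative, and the import assumes the vcvarsall module is on the search path:

```nim
import std/[os, strtabs]
import vcvarsall

# Load the VCC environment, pinning the compiler toolset via the new
# `vctoolset` parameter (forwarded to vcvarsall.bat as -vcvars_ver=14.29).
let vcvars = vccVarsAll("vcvarsall.bat", vccarchUnspecified,
                        vctoolset = "14.29", verbose = true)
if vcvars != nil:
  for key, val in vcvars:
    putEnv(key, val)
```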