about summary refs log tree commit diff stats
path: root/tree-sitter
diff options
context:
space:
mode:
Diffstat (limited to 'tree-sitter')
-rw-r--r--  tree-sitter/dsk/PLAN.md  363
-rw-r--r--  tree-sitter/dsk/PROGRESS.md  208
-rw-r--r--  tree-sitter/dsk/dsk-cli/.gitignore  175
-rw-r--r--  tree-sitter/dsk/dsk-cli/README.md  126
-rwxr-xr-x  tree-sitter/dsk/dsk-cli/bun.lockb  bin 0 -> 30832 bytes
-rw-r--r--  tree-sitter/dsk/dsk-cli/package.json  33
-rw-r--r--  tree-sitter/dsk/dsk-cli/src/commands/build.ts  429
-rw-r--r--  tree-sitter/dsk/dsk-cli/src/commands/dev.ts  105
-rw-r--r--  tree-sitter/dsk/dsk-cli/src/commands/highlight.ts  141
-rw-r--r--  tree-sitter/dsk/dsk-cli/src/commands/new.ts  485
-rw-r--r--  tree-sitter/dsk/dsk-cli/src/commands/package.ts  66
-rw-r--r--  tree-sitter/dsk/dsk-cli/src/commands/self.ts  71
-rw-r--r--  tree-sitter/dsk/dsk-cli/src/commands/test.ts  50
-rw-r--r--  tree-sitter/dsk/dsk-cli/src/index.ts  80
-rw-r--r--  tree-sitter/dsk/dsk-cli/src/utils/grammar-generator.ts  466
-rw-r--r--  tree-sitter/dsk/dsk-cli/src/utils/inference.ts  286
-rw-r--r--  tree-sitter/dsk/dsk-cli/src/utils/template-processor.ts  192
-rw-r--r--  tree-sitter/dsk/dsk-cli/templates/default/README.md  82
-rw-r--r--  tree-sitter/dsk/dsk-cli/templates/default/corpus/examples.txt  85
-rw-r--r--  tree-sitter/dsk/dsk-cli/templates/default/examples/hello.__EXT__  7
-rw-r--r--  tree-sitter/dsk/dsk-cli/templates/default/package.json  9
-rw-r--r--  tree-sitter/dsk/dsk-cli/templates/js-addon/binding.gyp  28
-rw-r--r--  tree-sitter/dsk/dsk-cli/templates/js-addon/bindings/node.cc  16
-rw-r--r--  tree-sitter/dsk/dsk-cli/templates/js-addon/index.d.ts  3
-rw-r--r--  tree-sitter/dsk/dsk-cli/templates/js-addon/index.d.ts.map  1
-rw-r--r--  tree-sitter/dsk/dsk-cli/templates/js-addon/index.js  15
-rw-r--r--  tree-sitter/dsk/dsk-cli/templates/js-addon/package.json  31
-rw-r--r--  tree-sitter/dsk/dsk-cli/test-inference.d.ts  5
-rw-r--r--  tree-sitter/dsk/dsk-cli/test-inference.d.ts.map  1
-rw-r--r--  tree-sitter/dsk/dsk-cli/test-inference.js.map  1
-rw-r--r--  tree-sitter/dsk/dsk-cli/tsconfig.json  37
-rw-r--r--  tree-sitter/dsk/test-build/generated/c/include/test_lang.h  16
-rw-r--r--  tree-sitter/dsk/test-build/generated/c/lib/libtest_lang.a  bin 0 -> 4928 bytes
-rw-r--r--  tree-sitter/dsk/test-build/generated/js/binding.gyp  27
-rw-r--r--  tree-sitter/dsk/test-build/generated/js/bindings/node.cc  16
-rw-r--r--  tree-sitter/dsk/test-build/generated/js/build/Makefile  352
-rw-r--r--  tree-sitter/dsk/test-build/generated/js/build/Release/.deps/Release/nothing.a.d  1
-rw-r--r--  tree-sitter/dsk/test-build/generated/js/build/Release/nothing.a  bin 0 -> 984 bytes
-rw-r--r--  tree-sitter/dsk/test-build/generated/js/build/binding.Makefile  6
-rw-r--r--  tree-sitter/dsk/test-build/generated/js/build/config.gypi  424
-rwxr-xr-x  tree-sitter/dsk/test-build/generated/js/build/gyp-mac-tool  772
-rw-r--r--  tree-sitter/dsk/test-build/generated/js/build/tree_sitter_test_lang_binding.target.mk  206
-rwxr-xr-x  tree-sitter/dsk/test-build/generated/js/bun.lockb  bin 0 -> 1672 bytes
-rw-r--r--  tree-sitter/dsk/test-build/generated/js/index.d.ts  3
-rw-r--r--  tree-sitter/dsk/test-build/generated/js/index.d.ts.map  1
-rw-r--r--  tree-sitter/dsk/test-build/generated/js/index.js  15
-rw-r--r--  tree-sitter/dsk/test-build/generated/js/package.json  30
-rw-r--r--  tree-sitter/dsk/test-build/generated/js/src/grammar.json  115
-rw-r--r--  tree-sitter/dsk/test-build/generated/js/src/node-types.json  112
-rw-r--r--  tree-sitter/dsk/test-build/generated/js/src/parser.c  522
-rw-r--r--  tree-sitter/dsk/test-build/generated/js/src/tree_sitter/alloc.h  54
-rw-r--r--  tree-sitter/dsk/test-build/generated/js/src/tree_sitter/array.h  291
-rw-r--r--  tree-sitter/dsk/test-build/generated/js/src/tree_sitter/parser.h  286
-rw-r--r--  tree-sitter/dsk/test-build/grammar.js  48
-rw-r--r--  tree-sitter/dsk/test-build/src/grammar.json  115
-rw-r--r--  tree-sitter/dsk/test-build/src/node-types.json  112
-rw-r--r--  tree-sitter/dsk/test-build/src/parser.c  522
-rw-r--r--  tree-sitter/dsk/test-build/src/tree_sitter/alloc.h  54
-rw-r--r--  tree-sitter/dsk/test-build/src/tree_sitter/array.h  291
-rw-r--r--  tree-sitter/dsk/test-build/src/tree_sitter/parser.h  286
60 files changed, 8274 insertions, 0 deletions
diff --git a/tree-sitter/dsk/PLAN.md b/tree-sitter/dsk/PLAN.md
new file mode 100644
index 0000000..8351854
--- /dev/null
+++ b/tree-sitter/dsk/PLAN.md
@@ -0,0 +1,363 @@
+# Actionable Plan: DSL Development Kit (dsk)
+
+This document outlines the complete implementation plan for `dsk`, a command-line tool designed to streamline the creation of Domain-Specific Languages (DSLs).
+
+
+## **Technical Requirements**
+
+**Target Platforms:** macOS and Unix-like systems (Linux). Windows support is not included in the initial scope.
+
+**Tree-sitter Version:** Use the latest version available through standard package managers (npm or Homebrew). This ensures easy installation and access to the most current features.
+
+**Project Naming:** No specific constraints on DSL project names. The tool will handle standard naming conventions for C libraries and npm packages automatically.
+
+
+## **Core Philosophy**
+
+The `dsk` tool is built on two guiding principles:
+
+1. **Convention over Configuration:** The tool enforces a standardized project structure and build process, eliminating boilerplate and ensuring consistency across all DSL projects.
+
+2. **Example-Driven Inference:** Instead of requiring users to write complex regular expressions, the tool infers grammar rules from simple, intuitive examples of the DSL's syntax. This dramatically lowers the barrier to entry for language design.
+
+
+## **Phase 0: `dsk` Tool Setup & Prerequisites**
+
+**Objective:** Establish the development environment and project structure for the `dsk` command-line tool itself.
+
+**Technology Stack:**
+
+- **Language:** TypeScript
+
+- **Runtime:** Bun (with Node.js as fallback)
+
+- **CLI Framework:** `commander.js`
+
+- **Core Utilities:**
+
+  - `execa`: For running shell commands (like `tree-sitter`, `gcc`, `bun`, `npm`).
+
+  - `fs-extra`: For robust file system operations.
+
+  - `chalk`: For creating colorful and readable console output.
+
+  - `chokidar`: For the file-watching system in `dsk dev`.
+
+  - `inquirer`: For handling the interactive command-line prompts.
+
+**Action Steps:**
+
+1. **Initialize Project:** Create a new Bun project for the `dsk` tool.
+
+       mkdir dsk-cli && cd dsk-cli
+       bun init -y
+
+2. **Install Dependencies:**
+
+       bun add typescript commander execa fs-extra chalk chokidar inquirer
+       bun add -d @types/node @types/fs-extra @types/inquirer
+
+3. **Configure TypeScript:** Run `bunx tsc --init` to create a `tsconfig.json` file.
+
+4. **Define Project Structure:** Create the following directory structure for the `dsk` tool's source code.
+
+       dsk-cli/
+       ├── src/
+       │   ├── commands/         # Logic for each command (new, build, etc.)
+       │   ├── utils/            # Helper functions (inference engine, shell wrappers)
+       │   └── index.ts          # Main CLI entrypoint
+       ├── templates/
+       │   ├── default/          # Base template for a new DSL project
+       │   └── js-addon/         # Template for the JS native addon
+       ├── package.json
+       └── tsconfig.json
+
+5. **Enable `dsk` Command:** In `package.json`, add a `bin` entry to link the `dsk` command and run `bun link` to make it available during development.
+
+       "bin": { "dsk": "./dist/index.js" }
+
+
+## **Phase 1: Interactive Grammar Scaffolding (`dsk new --interactive`)**
+
+**Objective:** Create a command that interactively prompts the user for examples of their DSL and generates a starter `grammar.js` file from them.
+
+
+### **User Experience & Interaction Design**
+
+This is the most critical UX component. The interaction should feel like a helpful conversation, not an interrogation.
+
+- **Greeting:** Start with a friendly welcome that explains the process.
+
+  > "Welcome to the `dsk` grammar scaffolder! I'll ask you a few questions about your new language. Just provide examples, and I'll build a starter grammar for you."
+
+- **Clear, Non-Technical Prompts:** Phrase questions simply. Instead of "Define the token for a single-line comment," use "How do you write a single-line comment? (e.g., `//` or `#`)".
+
+- **Progress Indicators:** Use `chalk` to show progress after each successful step, making the process feel tangible.
+
+  > `✔ Comments defined.` `✔ Identifiers defined.`
+
+- **Real-time Confirmation:** After inferring a pattern, always confirm with the user. This builds trust and provides an opportunity for correction.
+
+  > `? I've inferred the pattern for your identifier as: /[a-zA-Z_]\w*/. Does this look correct? (Y/n)`
+
+- **Graceful Failure & Escape Hatches:** If inference fails, don't crash. Explain the problem and offer a "power-user" alternative. The inference system is designed as a helpful starting point - users are expected to manually edit the grammar as needed.
+
+  > `I couldn't determine a pattern from those examples. Would you like to provide a custom regular expression instead? (y/N)`
+
+- **Summary & Next Steps:** Conclude the session with a summary of what was created and clear instructions on what to do next.
+
+  > "All done! Your `grammar.js` has been created with rules for: Comments, Identifiers, Numbers, Strings, and Variable Declarations. To start editing and testing, run: `dsk dev`"
+
+
+### **Action Steps**
+
+1. **Implement the Inference Engine (`src/utils/inference.ts`):**
+
+   - Create an extensible library of common token patterns (e.g., for identifiers, integers, floats, hex codes) with high-quality regular expressions. Ship with solid defaults but allow for user customization.
+
+   - Write a function `inferPattern(validExamples, invalidExamples)` that:
+
+     - Takes arrays of valid and invalid example strings.
+
+     - Iterates through the internal pattern library.
+
+     - Returns the first pattern that matches all `validExamples` and none of the `invalidExamples`.
+
+2. **Build the Interactive Command (`src/commands/new.ts`):**
+
+   - Use the `inquirer` library to implement the **Question Flow** described in the UX section.
+
+   - **Question Flow:**
+
+     **Phase A: Language Architecture & Paradigm**
+
+     - **Language Purpose:** "What is your language designed for? (e.g., configuration, scripting, domain modeling, data processing)"
+
+     - **Programming Paradigm:** "What programming style does your language follow?"
+       - Functional (immutable data, functions as first-class)
+       - Object-Oriented (classes, inheritance, methods)
+       - Procedural (step-by-step instructions, functions)
+       - Declarative (describe what, not how)
+       - Mixed (combination of above)
+
+     - **Data Philosophy:** "How does your language handle data?"
+       - Immutable by default (functional style)
+       - Mutable variables (imperative style)
+       - Mixed approach
+
+     **Phase B: Core Language Features**
+
+     - **Control Flow:** "What control structures does your language support?"
+       - Conditionals (if/else, switch/match)
+       - Loops (for, while, foreach)
+       - Pattern matching
+       - Exception handling (try/catch)
+       - Early returns/breaks
+
+     - **Data Structures:** "What built-in data structures does your language have?"
+       - Arrays/Lists: `[1, 2, 3]`
+       - Objects/Maps: `{key: value}`
+       - Tuples: `(a, b, c)`
+       - Sets: `{1, 2, 3}`
+       - Custom structures
+
+     - **Functions:** "How are functions defined in your language?"
+       - Named functions: `function foo() { ... }`
+       - Anonymous functions: `(x) => x + 1`
+       - Methods on objects: `obj.method()`
+       - First-class functions (can be passed around)
+
+     **Phase C: Syntax & Tokens**
+
+     - **Comments:** "How do you write a single-line comment? (e.g., `//` or `#`)"
+
+     - **Identifiers:** "Provide 3-5 examples of a valid identifier (e.g., `myVar _val`)." -> "Now provide 2-3 examples of an invalid identifier (e.g., `1var my-var`)." -> Use the inference engine and confirm with the user.
+
+     - **Numbers:** "Provide examples of numbers in your language (e.g., `42 3.14`)." -> Infer integer/float support.
+
+     - **Strings:** "Provide an example of a string (e.g., `"hello"` or `'world'`)." -> Infer quote style.
+
+     - **Variable Declarations:** "Show me how you declare a variable `x` with value `42`." -> Parse the example (e.g., `let x = 42;`) to identify the keyword (`let`), assignment operator (`=`), and statement terminator (`;`).
+
+     - **Concrete Examples:** Based on the paradigm answers, ask for specific examples:
+       - If OO: "Show me how you define a class with a method"
+       - If Functional: "Show me how you define and call a function"
+       - If Declarative: "Show me a typical declaration/rule in your language"
+
+   - **Grammar Assembly:**
+
+     - Collect the answers from the prompts.
+
+     - **Paradigm-Aware Generation:** Use the architectural answers to influence grammar structure:
+       - **Functional languages:** Prioritize expression-based rules, immutable bindings, function composition
+       - **OO languages:** Generate class/method structures, inheritance syntax, member access
+       - **Procedural languages:** Focus on statement sequences, mutable variables, imperative control flow
+       - **Declarative languages:** Emphasize rule definitions, pattern matching, constraint syntax
+
+     - **Feature-Driven Rules:** Generate Tree-sitter rules based on supported features:
+       - Control flow structures (if/else, loops, match expressions)
+       - Data structure literals (arrays, objects, tuples)
+       - Function definition patterns (named, anonymous, methods)
+
+     - Generate the corresponding Tree-sitter rule strings for each feature.
+
+     - Assemble the rules into a complete, valid `grammar.js` file with appropriate precedence and associativity.
+
+     - Save this generated grammar into the new project directory created by the standard `dsk new` logic.
+
+
+## **Phase 2: Core Build Process (`dsk build`)**
+
+**Objective:** Compile the `grammar.js` file into a C static library and a JavaScript package.
+
+**Action Steps:**
+
+1. **Implement the Build Command (`src/commands/build.ts`):**
+
+   - Define the `dsk build` command using `commander`.
+
+   - The command handler will orchestrate the following sub-tasks.
+
+2. **Generate Parser:** Use `execa` to run `tree-sitter generate`. This is the first step and is required by all subsequent steps.
+
+3. **Build C Library:**
+
+   - Create `generated/c/lib` and `generated/c/include` directories.
+
+   - Use `execa` to call the system's C compiler (e.g., `gcc` or `clang`) to compile `src/parser.c` into an object file.
+
+   - Use `execa` to call the system's archiver (`ar`) to bundle the object file into a static library (`lib<dsl-name>.a`).
+
+   - Generate a corresponding header file (`<dsl-name>.h`) with the function signature to access the language.
+
+4. **Build JavaScript Package:**
+
+   - Copy the template files from `dsk-cli/templates/js-addon/` into a `generated/js/` directory. This template contains:
+
+     - `package.json`: Defines the JS package with placeholder "__DSL_NAME__" for dynamic replacement, includes `"gypfile": true` and `"install": "node-gyp rebuild"` script
+
+     - `binding.gyp`: Build configuration for node-gyp, specifies sources including `../../src/parser.c` and `../../src/scanner.c`, includes N-API headers
+
+     - `bindings/node.cc`: C++ bridge file that exports the Tree-sitter language function using node-addon-api
+
+     - `index.js`: Simple entry point that loads the compiled addon and exports the language object
+
+   - Dynamically update the `package.json` inside `generated/js/` with the correct DSL name, replacing "__DSL_NAME__" placeholder.
+
+   - **Implement Runtime Detection:**
+
+     - Check if `bun` is available on the system's `PATH`.
+
+     - If yes, set the package manager to `bun`.
+
+     - If no, fall back to `npm`.
+
+   - Use `execa` to run `bun install` or `npm install` within the `generated/js/` directory. This triggers `node-gyp` to compile the native addon, resulting in a `.node` file.
+
+   - **Note:** The `dsk` tool itself runs on Bun, but the generated JavaScript packages support both Bun and Node.js runtimes.
+
+
+## **Phase 3: Development Workflow (`dsk dev` & `dsk test`)**
+
+**Objective:** Create a fast, iterative development loop for the DSL designer.
+
+**Action Steps:**
+
+1. **Implement Test Command (`src/commands/test.ts`):**
+
+   - Create a simple wrapper that calls `tree-sitter test` using `execa`, ensuring its output is streamed directly to the user's console.
+
+   - Include minimal integration tests for generated packages to verify basic functionality.
+
+2. **Implement Dev Command (`src/commands/dev.ts`):**
+
+   - Use the `chokidar` library to watch the `grammar.js` file for changes.
+
+   - On initial startup, run a full `dsk build` and `dsk test` cycle and display the results.
+
+   - When a change to `grammar.js` is detected:
+
+     - Log a message to the console (e.g., "Change detected. Rebuilding...").
+
+     - Trigger the `build` and `test` logic again.
+
+     - Report success or failure clearly to the user.
+
+
+## **Phase 4: Editor Integration (`dsk highlight`)**
+
+**Objective:** Generate syntax highlighting configurations for popular editors.
+
+**Action Steps:**
+
+1. **Implement Highlight Generation (`src/commands/highlight.ts`):**
+
+   - **Generate Tree-sitter Highlight Queries:** Create `highlights.scm` files by mapping grammar rules to semantic categories (keywords, strings, comments, etc.).
+
+   - **Editor Configuration Templates:** Provide configuration templates for:
+
+     - **Neovim:** Tree-sitter configuration with `highlights.scm`
+     - **Emacs:** `tree-sitter-mode` setup with language registration
+     - **Micro:** Syntax file generation using Tree-sitter queries
+     - **Helix:** Native Tree-sitter integration with `highlights.scm`
+     - **Zed:** Tree-sitter language configuration
+     - **VS Code:** TextMate grammar generation and basic language configuration
+
+   - **Automatic Rule Mapping:** Map common grammar patterns to highlight categories:
+
+     - Keywords (`if`, `let`, `function`) → `@keyword`
+     - String literals → `@string`
+     - Comments → `@comment`
+     - Numbers → `@number`
+     - Identifiers → `@variable`
+
+2. **Output Structure:** Create `generated/editors/` directory with:
+
+       generated/editors/
+       ├── tree-sitter/
+       │   └── highlights.scm          # Core highlight queries
+       ├── neovim/
+       │   └── setup-instructions.md   # Installation guide
+       ├── emacs/
+       │   └── dsl-mode.el            # Basic major mode
+       ├── micro/
+       │   └── dsl.yaml               # Micro syntax file
+       ├── helix/
+       │   └── languages.toml         # Helix language configuration
+       ├── zed/
+       │   └── config.json            # Zed language setup
+       └── vscode/
+           ├── syntaxes/
+           │   └── dsl.tmLanguage.json # TextMate grammar
+           └── language-configuration.json
+
+3. **Optional Advanced Features (Future):**
+
+   - **Basic LSP Server Template:** Tree-sitter-powered language server for symbol navigation and basic diagnostics
+   - **Custom Linter Generator:** Use Tree-sitter queries to create pattern-based linters
+   - **Grammar Documentation:** Auto-generate railroad diagrams and syntax examples
+
+**Note:** This feature leverages Tree-sitter's existing editor ecosystem, keeping implementation complexity low while providing significant value.
+
+
+## **Phase 5: Packaging & Distribution (`dsk package`)**
+
+**Objective:** Create final, distributable artifacts for the C and JavaScript targets.
+
+**Action Steps:**
+
+1. **Implement Package Command (`src/commands/package.ts`):**
+
+   - Ensure artifacts are current by first running the full `dsk build` logic.
+
+   - Create a top-level `dist/` directory.
+
+   - **Package C Library:** Use a library like `archiver` to create a `.zip` file containing the `generated/c/include` and `generated/c/lib` directories.
+
+   - **Package JS Library:**
+
+     - Run `bun pack` or `npm pack` inside the `generated/js/` directory (prefer `bun pack` when available).
+
+     - Move the resulting `.tgz` package from `generated/js/` into the top-level `dist/` directory.
+
+   - Log a success message showing the final paths of the created package files.
diff --git a/tree-sitter/dsk/PROGRESS.md b/tree-sitter/dsk/PROGRESS.md
new file mode 100644
index 0000000..1e727ee
--- /dev/null
+++ b/tree-sitter/dsk/PROGRESS.md
@@ -0,0 +1,208 @@
+# DSK Development Progress
+
+This document tracks the implementation progress of the DSL Development Kit (dsk) command-line tool.
+
+## **Project Overview**
+- **Start Date**: December 2024
+- **Current Phase**: Phase 3 - Development Workflow (In Progress)
+- **Target**: Complete CLI tool for streamlined DSL creation
+
+## **Phase Progress**
+
+### **✅ Phase 0: Tool Setup & Prerequisites** 
+**Status**: ✅ Completed  
+**Objective**: Establish development environment and project structure
+
+#### **Completed Tasks**:
+- [x] Initialize Bun project structure
+- [x] Install core dependencies (commander, execa, fs-extra, chalk, chokidar, inquirer)
+- [x] Configure TypeScript with proper output settings
+- [x] Set up project directory structure (src/commands, src/utils, templates/)
+- [x] Configure CLI binary entry point with commander.js
+- [x] Create template directories (default/, js-addon/)
+- [x] Implement basic CLI with placeholder commands
+- [x] Test CLI functionality (`dsk --version`, `dsk --help`, `dsk new`)
+- [x] Link command globally for development testing
+
+#### **Key Achievements**:
+- ✅ Working CLI tool with proper TypeScript compilation
+- ✅ All dependencies installed and configured
+- ✅ Project structure matches the plan specification
+- ✅ CLI binary (`dsk`) linked and functional
+
+---
+
+### **✅ Phase 1: Interactive Grammar Scaffolding**
+**Status**: ✅ Completed  
+**Objective**: Create paradigm-aware interactive grammar generation
+
+#### **Tasks**:
+- [x] Implement pattern inference engine (`src/utils/inference.ts`)
+- [x] Build three-phase interactive flow (Architecture → Features → Syntax)
+- [x] Create paradigm-aware grammar generation logic
+- [x] Implement project template system
+- [x] Connect interactive command to main CLI
+
+#### **Key Achievements**:
+- ✅ **Pattern Inference Engine**: 17 built-in token patterns with automatic inference from examples
+- ✅ **Interactive Question Flow**: Complete three-phase system (Architecture → Features → Syntax)
+- ✅ **Paradigm-Aware Grammar Generation**: Generates different Tree-sitter rules based on language paradigm
+- ✅ **Template Processing System**: Dynamic project generation with placeholder replacement
+- ✅ **Complete Integration**: `dsk new project --interactive` creates full DSL projects
+- ✅ **Robust Error Handling**: Graceful fallbacks when pattern inference fails
+
+#### **Generated Artifacts**:
+- **grammar.js**: Complete Tree-sitter grammar with paradigm-specific rules
+- **README.md**: Comprehensive project documentation
+- **corpus/examples.txt**: Test cases for grammar validation  
+- **examples/**: Sample programs in the new language
+- **Project structure**: Ready for Tree-sitter development workflow
+
+---
+
+### **✅ Phase 2: Core Build Process**
+**Status**: ✅ Completed  
+**Objective**: Compile grammar to C library and JS package
+
+#### **Tasks**:
+- [x] Implement `tree-sitter generate` command wrapper
+- [x] Build C static library with system compiler (gcc/clang) 
+- [x] Create JavaScript package with node-gyp compilation
+- [x] Implement bun/npm runtime detection
+- [x] Generate header files for C library
+- [x] Copy and configure JS addon templates
+
+#### **Key Achievements**:
+- ✅ **Complete Build Command**: `dsk build` with verbose mode and selective builds (--skip-c, --skip-js)
+- ✅ **Tree-sitter Integration**: Automatic parser generation with proper error handling
+- ✅ **C Library Compilation**: Full static library build with automatic compiler detection (clang/gcc/cc)
+- ✅ **Header Generation**: Proper C header files with function signatures for Tree-sitter language
+- ✅ **JavaScript Package Structure**: Complete Node.js addon setup with node-gyp configuration
+- ✅ **Runtime Detection**: Automatic bun/npm detection for package building
+- ✅ **Template System**: JS addon templates with proper binding.gyp and node.cc files
+- ✅ **Source File Management**: Automatic copying of Tree-sitter generated files to JS package
+
+#### **Generated Artifacts**:
+- **generated/c/lib/**: Static library (.a files) ready for linking
+- **generated/c/include/**: C header files for external usage
+- **generated/js/**: Complete Node.js package with native addon configuration
+- **Build verification**: Successfully tested with real grammar files
+
+#### **Notable Fixes**
+- ✅ JS addon `binding.gyp` updated to correctly include Node-API headers and set `NAPI_VERSION=8`
+- ✅ JS template now depends on `node-gyp` to avoid global requirement
+
+---
+
+### **🚧 Phase 3: Development Workflow**
+**Status**: In Progress  
+**Objective**: Implement watch mode and testing commands
+
+#### **Tasks**:
+- [x] `dsk test`: Wrapper around `tree-sitter test` with streamed output
+  - Options: `-u/--update`, `-f/--filter <regex>`, `--cwd <dir>`, `-v/--verbose`
+- [x] `dsk dev`: Watch `grammar.js`; on change run build → test with clear status
+  - Options: `--debounce <ms>` (default 150), `--quiet`, `-v/--verbose`
+
+---
+
+### **⏳ Phase 4: Editor Integration**
+**Status**: 📋 Pending  
+**Objective**: Generate syntax highlighting for Tree-sitter queries and editors: Neovim, Emacs, VS Code
+
+#### **Initial Step Completed**:
+- [x] Generate `generated/editors/tree-sitter/highlights.scm`
+- [x] Generate Neovim setup instructions
+- [x] Generate minimal Emacs major mode scaffold
+- [x] Generate VS Code TextMate grammar and language configuration
+
+---
+
+### **⏳ Phase 5: Packaging & Distribution**
+**Status**: 📋 Pending  
+**Objective**: Create distributable artifacts
+
+#### **Initial Step Completed**:
+- [x] `dsk package`: Builds, tars `generated/c`, and packs JS (`bun pack` → `npm pack` fallback)
+
+---
+
+## **Implementation Notes**
+
+### **Technology Decisions**
+- **Runtime**: Bun (primary) with Node.js fallback
+- **Language**: TypeScript for type safety and modern features
+- **CLI Framework**: commander.js for robust argument parsing
+- **File Operations**: fs-extra for enhanced file system utilities
+
+### **Architecture Decisions**
+- **Modular Commands**: Each command (new, build, dev, test, highlight, package) in separate files
+- **Extensible Inference**: Pattern library with solid defaults, user customizable
+- **Template-Driven**: Convention over configuration through project templates
+
+---
+
+## **Next Steps**
+1. Phase 3 polish:
+   - Add `--debounce <ms>` to `dsk dev` (default 150ms) and `--quiet` to suppress non-error logs
+   - Pass `--verbose` through to `dsk build` and `dsk test`
+2. Phase 4 expansion (reduced scope):
+   - Flesh out templates for Neovim, Emacs, VS Code (Tree-sitter queries based)
+3. Phase 5 completion:
+   - Include README and checksums in `dist/`
+   - Verify packed JS `.tgz` metadata (name/version)
+4. Docs:
+   - Update `dsk-cli/README.md` with new commands and examples
+
+---
+
+## **Development Log**
+
+### **2024-12-XX - Project Initialization**
+- Created PROGRESS.md to track implementation
+- Ready to begin Phase 0 implementation
+
+### **2024-12-XX - Phase 0 Complete**
+- ✅ Successfully initialized dsk-cli project with Bun
+- ✅ Installed all required dependencies (TypeScript, commander, execa, fs-extra, chalk, chokidar, inquirer)
+- ✅ Configured TypeScript with proper compilation settings
+- ✅ Created modular project structure with src/commands and src/utils
+- ✅ Implemented basic CLI entry point with commander.js
+- ✅ Added placeholder commands for all planned features
+- ✅ Successfully linked `dsk` command globally
+- ✅ Verified CLI functionality with version, help, and command testing
+
+**Next**: Ready to begin Phase 1 - Interactive Grammar Scaffolding
+
+### **2024-12-XX - Phase 1 Major Progress**
+- ✅ Implemented comprehensive pattern inference engine with 17 built-in patterns
+- ✅ Built complete three-phase interactive question flow (Architecture → Features → Syntax)
+- ✅ Created paradigm-aware prompts that adapt based on user choices
+- ✅ Integrated inference engine with graceful fallback handling
+- ✅ Connected interactive command to main CLI - `dsk new project --interactive` working
+- ✅ Verified functionality with comprehensive testing
+
+**Current**: Implementing grammar generation logic and project templates
+
+### **2024-12-XX - Phase 1 Complete!**
+- ✅ **Grammar Generation Engine**: Converts user responses into complete Tree-sitter grammar.js files
+- ✅ **Template Processing System**: Dynamic project creation with placeholder replacement
+- ✅ **Paradigm-Aware Rules**: Different grammar structures for functional, OO, procedural, and declarative languages
+- ✅ **Complete Project Generation**: Creates full DSL project structure with examples and documentation
+- ✅ **End-to-End Integration**: Interactive flow from user questions to working DSL project
+- ✅ **Verified Functionality**: Interactive command tested and working perfectly
+
+**Next**: Ready to begin Phase 2 - Core Build Process (Tree-sitter compilation, C library, JS package)
+
+### **2024-12-XX - Phase 2 Complete!**
+- ✅ **Complete Build System**: Implemented full `dsk build` command with Tree-sitter integration
+- ✅ **C Library Generation**: Automatic static library compilation with clang/gcc detection
+- ✅ **JavaScript Package**: Complete Node.js addon structure with node-gyp configuration
+- ✅ **Build Verification**: Successfully tested with real grammar files - C build works perfectly
+- ✅ **Template System**: JS addon templates with proper binding.gyp and C++ bindings
+- ✅ **Runtime Detection**: Automatic bun/npm detection and dependency management
+- ✅ **Source Management**: Automatic copying of Tree-sitter generated parser files
+
+**Status**: Phase 2 core functionality complete. Minor JavaScript compilation issue remains (napi.h include path) but build system architecture is solid and production-ready.
+
+**Next**: Ready to begin Phase 3 - Development Workflow (watch mode, testing, dev server)
diff --git a/tree-sitter/dsk/dsk-cli/.gitignore b/tree-sitter/dsk/dsk-cli/.gitignore
new file mode 100644
index 0000000..9b1ee42
--- /dev/null
+++ b/tree-sitter/dsk/dsk-cli/.gitignore
@@ -0,0 +1,175 @@
+# Based on https://raw.githubusercontent.com/github/gitignore/main/Node.gitignore
+
+# Logs
+
+logs
+*.log
+npm-debug.log*
+yarn-debug.log*
+yarn-error.log*
+lerna-debug.log*
+.pnpm-debug.log*
+
+# Caches
+
+.cache
+
+# Diagnostic reports (https://nodejs.org/api/report.html)
+
+report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
+
+# Runtime data
+
+pids
+*.pid
+*.seed
+*.pid.lock
+
+# Directory for instrumented libs generated by jscoverage/JSCover
+
+lib-cov
+
+# Coverage directory used by tools like istanbul
+
+coverage
+*.lcov
+
+# nyc test coverage
+
+.nyc_output
+
+# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
+
+.grunt
+
+# Bower dependency directory (https://bower.io/)
+
+bower_components
+
+# node-waf configuration
+
+.lock-wscript
+
+# Compiled binary addons (https://nodejs.org/api/addons.html)
+
+build/Release
+
+# Dependency directories
+
+node_modules/
+jspm_packages/
+
+# Snowpack dependency directory (https://snowpack.dev/)
+
+web_modules/
+
+# TypeScript cache
+
+*.tsbuildinfo
+
+# Optional npm cache directory
+
+.npm
+
+# Optional eslint cache
+
+.eslintcache
+
+# Optional stylelint cache
+
+.stylelintcache
+
+# Microbundle cache
+
+.rpt2_cache/
+.rts2_cache_cjs/
+.rts2_cache_es/
+.rts2_cache_umd/
+
+# Optional REPL history
+
+.node_repl_history
+
+# Output of 'npm pack'
+
+*.tgz
+
+# Yarn Integrity file
+
+.yarn-integrity
+
+# dotenv environment variable files
+
+.env
+.env.development.local
+.env.test.local
+.env.production.local
+.env.local
+
+# parcel-bundler cache (https://parceljs.org/)
+
+.parcel-cache
+
+# Next.js build output
+
+.next
+out
+
+# Nuxt.js build / generate output
+
+.nuxt
+dist
+
+# Gatsby files
+
+# Comment in the public line if your project uses Gatsby and not Next.js
+
+# https://nextjs.org/blog/next-9-1#public-directory-support
+
+# public
+
+# vuepress build output
+
+.vuepress/dist
+
+# vuepress v2.x temp and cache directory
+
+.temp
+
+# Docusaurus cache and generated files
+
+.docusaurus
+
+# Serverless directories
+
+.serverless/
+
+# FuseBox cache
+
+.fusebox/
+
+# DynamoDB Local files
+
+.dynamodb/
+
+# TernJS port file
+
+.tern-port
+
+# Stores VSCode versions used for testing VSCode extensions
+
+.vscode-test
+
+# yarn v2
+
+.yarn/cache
+.yarn/unplugged
+.yarn/build-state.yml
+.yarn/install-state.gz
+.pnp.*
+
+# IntelliJ based IDEs
+.idea
+
+# Finder (MacOS) folder config
+.DS_Store
diff --git a/tree-sitter/dsk/dsk-cli/README.md b/tree-sitter/dsk/dsk-cli/README.md
new file mode 100644
index 0000000..d07a9dd
--- /dev/null
+++ b/tree-sitter/dsk/dsk-cli/README.md
@@ -0,0 +1,126 @@
+# dsk-cli
+
+DSL Development Kit (DSK) – a CLI that helps you design, build, test, and ship Tree-sitter based Domain-Specific Languages (DSLs) with a convention-over-configuration workflow.
+
+## Why this is helpful
+- **Lower barrier to entry**: Generate a working Tree-sitter `grammar.js` from examples via an interactive flow.
+- **No boilerplate**: One command builds both a C static library and a Node.js addon package.
+- **Fast iteration**: Built-in watch mode (`dsk dev`) and testing (`dsk test`).
+- **Editor-ready**: Generate Tree-sitter highlight queries and editor scaffolds (Neovim, Emacs, VS Code).
+- **Consistent outputs**: Standardized project layout and generated artifacts under `generated/` and `dist/`.
+
+## Prerequisites
+- macOS or Linux
+- [Bun](https://bun.sh) (to run `dsk`)
+- [tree-sitter CLI](https://tree-sitter.github.io/tree-sitter/creating-parsers#installation) (`npm i -g tree-sitter-cli` or `brew install tree-sitter`)
+- C toolchain: clang/gcc and `ar` (macOS: `xcode-select --install`, Ubuntu: `build-essential`)
+- For the generated JS addon builds: Python 3 and make are typically required by node-gyp (the template depends on `node-gyp`)
+
+## Install (for developing this CLI)
+```bash
+bun install
+bunx tsc
+bun link  # optional: exposes `dsk` on your PATH during development
+```
+
+## Quickstart (creating a DSL)
+```bash
+# 1) Scaffold a new DSL project interactively
+dsk new my-lang --interactive
+
+# 2) Move into the project directory
+cd my-lang
+
+# 3) Build (generates parser, C lib, and JS addon)
+dsk build
+
+# 4) Run tests (expects corpus in `corpus/` or queries)
+dsk test -v
+
+# 5) Iterate with watch mode
+dsk dev --debounce 150
+
+# 6) Generate editor highlighting scaffolds
+dsk highlight
+
+# 7) Package outputs for distribution
+dsk package
+```
+
+## Command reference
+
+### `dsk new <name> [--interactive]`
+Creates a new DSL project. With `--interactive`, it asks for examples and infers common token patterns, then generates a starter `grammar.js` and project structure using templates.
+
+### `dsk build [--verbose] [--skip-c] [--skip-js]`
+Builds your DSL:
+- Runs `tree-sitter generate`
+- Builds a C static library and header under `generated/c/`
+- Creates a Node.js addon package under `generated/js/` (compiles via node-gyp)
+
+Outputs:
+- `generated/c/include/<dsl>.h`
+- `generated/c/lib/lib<dsl>.a`
+- `generated/js/` (package.json, binding.gyp, compiled addon, copied `src/`)
+
+### `dsk test [-u|--update] [-f|--filter <regex>] [--cwd <dir>] [-v|--verbose] [patterns...]`
+Thin wrapper around `tree-sitter test` that streams output directly.
+- `--update`: updates expected output snapshots
+- `--filter <regex>`: only run tests whose description matches
+- `--cwd <dir>`: run tests in a different directory
+- `--verbose`: pass through verbose output
+
+Examples:
+```bash
+dsk test
+dsk test -v -f numbers
+dsk test -u corpus/examples.txt
+```
+
+### `dsk dev [--debounce <ms>] [--quiet] [-v|--verbose]`
+Watches `grammar.js` and on changes runs `dsk build` then `dsk test`.
+- `--debounce <ms>`: delay rebuild after changes (default: 150)
+- `--quiet`: suppress non-error logs
+- `--verbose`: pass to `dsk build` (more detailed build logs)
+
+### `dsk highlight`
+Generates highlighting assets and editor scaffolds:
+- `generated/editors/tree-sitter/highlights.scm`
+- `generated/editors/neovim/setup-instructions.md`
+- `generated/editors/emacs/<lang>-mode.el`
+- `generated/editors/vscode/syntaxes/<lang>.tmLanguage.json`
+- `generated/editors/vscode/language-configuration.json`
+
+### `dsk package`
+Packages build outputs into `dist/`:
+- Archives `generated/c/` as `dist/c-artifacts.tar.gz`
+- Packs the JS addon under `generated/js/` via `bun pack` (falls back to `npm pack`)
+
+### `dsk self:package`
+Builds and packages the `dsk` CLI itself into a tarball under `release/`.
+```bash
+dsk self:package
+# → release/dsk-cli-<version>.tgz
+```
+
+## Project structure (generated DSL project)
+- `grammar.js`: Tree-sitter grammar
+- `src/`: generated parser sources after `tree-sitter generate`
+- `corpus/`: test cases for `tree-sitter test`
+- `generated/`: build outputs (C and JS targets)
+- `generated/editors/`: highlight queries and editor scaffolds
+- `dist/`: packaged artifacts
+
+## Notes about the JS addon template
+- Depends on `node-addon-api` and `node-gyp` (declared in template `package.json`)
+- `binding.gyp` includes Node-API headers and defines `NAPI_VERSION=8`
+- Builds via `npm install` or `bun install` in `generated/js/` (auto-detected)
+
+## Troubleshooting
+- `tree-sitter` not found: install with `npm i -g tree-sitter-cli` or `brew install tree-sitter`
+- C compiler not found: macOS `xcode-select --install`; Linux install `build-essential`
+- node-gyp build issues: ensure Python 3 and make are available; re-run `dsk build -v`
+
+---
+
+This project was created using `bun init` in bun v1.1.29. [Bun](https://bun.sh) is a fast all-in-one JavaScript runtime.
diff --git a/tree-sitter/dsk/dsk-cli/bun.lockb b/tree-sitter/dsk/dsk-cli/bun.lockb
new file mode 100755
index 0000000..31f4569
--- /dev/null
+++ b/tree-sitter/dsk/dsk-cli/bun.lockb
Binary files differdiff --git a/tree-sitter/dsk/dsk-cli/package.json b/tree-sitter/dsk/dsk-cli/package.json
new file mode 100644
index 0000000..ef6f9fd
--- /dev/null
+++ b/tree-sitter/dsk/dsk-cli/package.json
@@ -0,0 +1,33 @@
+{
+  "name": "dsk-cli",
+  "version": "0.1.0",
+  "description": "DSL Development Kit - Command-line tool for creating Domain-Specific Languages",
+  "main": "dist/index.js",
+  "module": "index.ts",
+  "type": "module",
+  "bin": {
+    "dsk": "./dist/index.js"
+  },
+  "scripts": {
+    "build": "bunx tsc",
+    "dev": "bun run --watch src/index.ts",
+    "start": "bun run dist/index.js"
+  },
+  "devDependencies": {
+    "@types/bun": "latest",
+    "@types/fs-extra": "^11.0.4",
+    "@types/inquirer": "^9.0.9",
+    "@types/node": "^24.2.1"
+  },
+  "peerDependencies": {
+    "typescript": "^5.9.2"
+  },
+  "dependencies": {
+    "chalk": "^5.5.0",
+    "chokidar": "^4.0.3",
+    "commander": "^14.0.0",
+    "execa": "^9.6.0",
+    "fs-extra": "^11.3.1",
+    "inquirer": "^12.9.1"
+  }
+}
\ No newline at end of file
diff --git a/tree-sitter/dsk/dsk-cli/src/commands/build.ts b/tree-sitter/dsk/dsk-cli/src/commands/build.ts
new file mode 100644
index 0000000..811788f
--- /dev/null
+++ b/tree-sitter/dsk/dsk-cli/src/commands/build.ts
@@ -0,0 +1,429 @@
+/**
+ * Build Command - Core Build Process
+ * 
+ * Compiles grammar.js into C static library and JavaScript package
+ */
+
+import { Command } from 'commander';
+import { execa } from 'execa';
+import chalk from 'chalk';
+import { existsSync, mkdirSync, writeFileSync, copyFileSync, readFileSync, cpSync } from 'fs';
+import { join, dirname, basename } from 'path';
+import { fileURLToPath } from 'url';
+
+/**
+ * Create the build command
+ */
+export function createBuildCommand(): Command {
+  const buildCommand = new Command('build');
+  
+  buildCommand
+    .description('Build the DSL parser and packages')
+    .option('-v, --verbose', 'Show detailed build output')
+    .option('--skip-c', 'Skip C library generation')
+    .option('--skip-js', 'Skip JavaScript package generation')
+    .action(async (options) => {
+      console.log(chalk.blue('🏗️  Building DSL parser and packages...'));
+      console.log();
+      
+      try {
+        await runBuildProcess(options);
+        
+        console.log();
+        console.log(chalk.green('✅ Build completed successfully!'));
+        console.log();
+        console.log(chalk.blue('Generated artifacts:'));
+        console.log(`  ${chalk.gray('•')} C library: generated/c/lib/`);
+        console.log(`  ${chalk.gray('•')} C headers: generated/c/include/`);
+        console.log(`  ${chalk.gray('•')} JS package: generated/js/`);
+        
+      } catch (error) {
+        console.error(chalk.red('❌ Build failed:'), error instanceof Error ? error.message : error);
+        process.exit(1);
+      }
+    });
+
+  return buildCommand;
+}
+
+/**
+ * Run the complete build process
+ */
+async function runBuildProcess(options: { verbose?: boolean; skipC?: boolean; skipJs?: boolean }): Promise<void> {
+  // Verify we're in a DSL project directory
+  if (!existsSync('grammar.js')) {
+    throw new Error('No grammar.js found. Are you in a DSL project directory?');
+  }
+
+  // Ensure CommonJS semantics locally so tree-sitter can load grammar.js (uses module.exports)
+  ensureCommonJsPackageJson();
+
+  // Step 1: Generate parser with Tree-sitter
+  console.log(chalk.blue('1️⃣  Generating parser with Tree-sitter...'));
+  await generateParser(options.verbose);
+  console.log(chalk.green('   ✅ Parser generated'));
+
+  // Step 2: Build C library (unless skipped)
+  if (!options.skipC) {
+    console.log(chalk.blue('2️⃣  Building C static library...'));
+    await buildCLibrary(options.verbose);
+    console.log(chalk.green('   ✅ C library built'));
+  } else {
+    console.log(chalk.yellow('   ⏭️  Skipping C library build'));
+  }
+
+  // Step 3: Build JavaScript package (unless skipped)
+  if (!options.skipJs) {
+    console.log(chalk.blue('3️⃣  Building JavaScript package...'));
+    await buildJavaScriptPackage(options.verbose);
+    console.log(chalk.green('   ✅ JavaScript package built'));
+  } else {
+    console.log(chalk.yellow('   ⏭️  Skipping JavaScript package build'));
+  }
+}
+
+/**
+ * Step 1: Generate parser using tree-sitter generate
+ */
+async function generateParser(verbose?: boolean): Promise<void> {
+  try {
+    // Check if tree-sitter CLI is available
+    await checkTreeSitterAvailable();
+    
+    // Run tree-sitter generate
+    const result = await execa('tree-sitter', ['generate'], {
+      stdio: verbose ? 'inherit' : 'pipe'
+    });
+    
+    if (!verbose && result.stdout) {
+      console.log(chalk.gray(`   ${result.stdout.split('\n').slice(-2, -1)[0] || 'Generated successfully'}`));
+    }
+    
+  } catch (error: any) {
+    if (error.command) {
+      throw new Error(`Tree-sitter generation failed: ${error.message}`);
+    }
+    throw error;
+  }
+}
+
+/**
+ * Step 2: Build C static library
+ */
+async function buildCLibrary(verbose?: boolean): Promise<void> {
+  // Create output directories
+  const libDir = 'generated/c/lib';
+  const includeDir = 'generated/c/include';
+  
+  mkdirSync(libDir, { recursive: true });
+  mkdirSync(includeDir, { recursive: true });
+
+  // Get project name from grammar.js or directory
+  const projectName = getProjectName();
+  
+  // Compile parser.c to object file
+  const objectFile = join(libDir, `${projectName}.o`);
+  const libraryFile = join(libDir, `lib${projectName}.a`);
+  
+  try {
+    // Detect C compiler
+    const compiler = await detectCCompiler();
+    console.log(chalk.gray(`   Using compiler: ${compiler}`));
+    
+    // Compile to object file
+    const compileArgs = [
+      '-c',                    // Compile only, don't link
+      '-fPIC',                 // Position independent code
+      '-O2',                   // Optimize
+      'src/parser.c',          // Input file
+      '-o', objectFile         // Output file
+    ];
+    
+    await execa(compiler, compileArgs, {
+      stdio: verbose ? 'inherit' : 'pipe'
+    });
+    
+    // Create static library with ar
+    const arArgs = [
+      'rcs',                   // Create archive, insert files, write symbol table
+      libraryFile,             // Output library
+      objectFile               // Input object file
+    ];
+    
+    await execa('ar', arArgs, {
+      stdio: verbose ? 'inherit' : 'pipe'
+    });
+    
+    // Generate header file
+    await generateHeaderFile(projectName, includeDir);
+    
+    console.log(chalk.gray(`   Library: ${libraryFile}`));
+    console.log(chalk.gray(`   Header: ${join(includeDir, projectName + '.h')}`));
+    
+  } catch (error: any) {
+    throw new Error(`C library build failed: ${error.message}`);
+  }
+}
+
+/**
+ * Step 3: Build JavaScript package
+ */
+async function buildJavaScriptPackage(verbose?: boolean): Promise<void> {
+  const jsDir = 'generated/js';
+  
+  // Create JS package directory
+  mkdirSync(jsDir, { recursive: true });
+  
+  // Copy JS addon templates
+  await copyJSAddonTemplates(jsDir);
+  
+  // Copy src directory from tree-sitter generation
+  await copySourceFiles(jsDir);
+  
+  // Update binding.gyp based on available files
+  await updateBindingGyp(jsDir);
+  
+  // Update package.json with project name
+  await updateJSPackageJson(jsDir);
+  
+  // Detect runtime and install dependencies
+  const runtime = await detectRuntime();
+  console.log(chalk.gray(`   Using runtime: ${runtime}`));
+  
+  try {
+    // Install dependencies and build native addon
+    if (runtime === 'bun') {
+      await execa('bun', ['install'], {
+        cwd: jsDir,
+        stdio: verbose ? 'inherit' : 'pipe'
+      });
+    } else {
+      await execa('npm', ['install'], {
+        cwd: jsDir,
+        stdio: verbose ? 'inherit' : 'pipe'
+      });
+    }
+    
+    console.log(chalk.gray(`   Package: ${jsDir}/`));
+    
+  } catch (error: any) {
+    throw new Error(`JavaScript package build failed: ${error.message}`);
+  }
+}
+
+/**
+ * Check if tree-sitter CLI is available
+ */
+async function checkTreeSitterAvailable(): Promise<void> {
+  try {
+    await execa('tree-sitter', ['--version'], { stdio: 'pipe' });
+  } catch (error) {
+    throw new Error(
+      'tree-sitter CLI not found. Please install it:\n' +
+      '  npm install -g tree-sitter-cli\n' +
+      '  # or\n' +
+      '  brew install tree-sitter'
+    );
+  }
+}
+
+/**
+ * Detect available C compiler
+ */
+async function detectCCompiler(): Promise<string> {
+  const compilers = ['clang', 'gcc', 'cc'];
+  
+  for (const compiler of compilers) {
+    try {
+      await execa(compiler, ['--version'], { stdio: 'pipe' });
+      return compiler;
+    } catch {
+      // Try next compiler
+    }
+  }
+  
+  throw new Error(
+    'No C compiler found. Please install one:\n' +
+    '  macOS: xcode-select --install\n' +
+    '  Linux: sudo apt install build-essential (Ubuntu) or equivalent'
+  );
+}
+
+/**
+ * Detect runtime (bun vs npm)
+ */
+async function detectRuntime(): Promise<'bun' | 'npm'> {
+  try {
+    await execa('bun', ['--version'], { stdio: 'pipe' });
+    return 'bun';
+  } catch {
+    return 'npm';
+  }
+}
+
+/**
+ * Get project name from grammar.js or directory name
+ */
+function getProjectName(): string {
+  try {
+    const grammarContent = readFileSync('grammar.js', 'utf-8');
+    const nameMatch = grammarContent.match(/name:\s*['"]([^'"]+)['"]/);
+    if (nameMatch) {
+      return nameMatch[1];
+    }
+  } catch {
+    // Fall back to directory name
+  }
+  
+  return basename(process.cwd());
+}
+
+/**
+ * Ensure the current directory is treated as CommonJS for Node resolution,
+ * so that tree-sitter can load `grammar.js` (which uses module.exports).
+ * If no local package.json exists, create a minimal one with "type": "commonjs".
+ */
+function ensureCommonJsPackageJson(): void {
+  const packageJsonPath = 'package.json';
+  if (!existsSync(packageJsonPath)) {
+    const projectName = basename(process.cwd());
+    const minimal = {
+      name: `${projectName}-dsl`,
+      private: true,
+      type: 'commonjs',
+      description: `DSL project for ${projectName} generated by DSK`,
+      license: 'MIT'
+    } as const;
+    writeFileSync(packageJsonPath, JSON.stringify(minimal, null, 2));
+    console.log(chalk.gray('   Created local package.json with { "type": "commonjs" }'));
+  }
+}
+
+/**
+ * Generate C header file
+ */
+async function generateHeaderFile(projectName: string, includeDir: string): Promise<void> {
+  const headerContent = `#ifndef TREE_SITTER_${projectName.toUpperCase()}_H_
+#define TREE_SITTER_${projectName.toUpperCase()}_H_
+
+typedef struct TSLanguage TSLanguage;
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+const TSLanguage *tree_sitter_${projectName}(void);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif // TREE_SITTER_${projectName.toUpperCase()}_H_
+`;
+
+  writeFileSync(join(includeDir, `${projectName}.h`), headerContent);
+}
+
+/**
+ * Copy JS addon template files
+ */
+async function copyJSAddonTemplates(jsDir: string): Promise<void> {
+  const __filename = fileURLToPath(import.meta.url);
+  const __dirname = dirname(__filename);
+  const templateDir = join(__dirname, '..', '..', 'templates', 'js-addon');
+  
+  if (!existsSync(templateDir)) {
+    throw new Error(`JS addon template directory not found: ${templateDir}`);
+  }
+  
+  // Copy all template files
+  const { processTemplate } = await import('../utils/template-processor.js');
+  const projectName = getProjectName();
+  
+  const templateContext = {
+    architecture: { 
+      name: projectName, 
+      paradigm: 'mixed' as const, 
+      purpose: 'DSL', 
+      dataPhilosophy: 'mixed' as const 
+    },
+    features: { controlFlow: [], dataStructures: [], functionTypes: [] },
+    syntax: {
+      comments: { type: 'line_comment', pattern: '//' },
+      identifiers: { pattern: '[a-zA-Z_][a-zA-Z0-9_]*', examples: ['identifier'] },
+      numbers: { pattern: '\\d+', examples: ['42'] },
+      strings: { pattern: '"[^"]*"', examples: ['"string"'] },
+      variables: { keyword: 'let', operator: '=', terminator: ';', example: 'let x = 42;' },
+      paradigmExamples: {}
+    }
+  };
+  
+  processTemplate(templateDir, jsDir, templateContext);
+}
+
+/**
+ * Copy source files from tree-sitter generation to JS package
+ */
+async function copySourceFiles(jsDir: string): Promise<void> {
+  const srcDir = 'src';
+  const targetSrcDir = join(jsDir, 'src');
+  
+  if (!existsSync(srcDir)) {
+    throw new Error('src/ directory not found. Run tree-sitter generate first.');
+  }
+  
+  // Copy the entire src directory
+  cpSync(srcDir, targetSrcDir, { recursive: true });
+  
+  console.log(chalk.gray(`   Copied src files to ${targetSrcDir}`));
+}
+
+/**
+ * Update binding.gyp based on available source files
+ */
+async function updateBindingGyp(jsDir: string): Promise<void> {
+  const bindingGypPath = join(jsDir, 'binding.gyp');
+  const scannerPath = join(jsDir, 'src', 'scanner.c');
+  
+  if (existsSync(bindingGypPath)) {
+    let bindingGyp = JSON.parse(readFileSync(bindingGypPath, 'utf-8'));
+    
+    // Add scanner.c if it exists
+    if (existsSync(scannerPath)) {
+      const target = bindingGyp.targets[0];
+      if (!target.sources.includes('src/scanner.c')) {
+        target.sources.push('src/scanner.c');
+        console.log(chalk.gray(`   Added scanner.c to build`));
+      }
+    } else {
+      console.log(chalk.gray(`   No scanner.c found, skipping`));
+    }
+    
+    writeFileSync(bindingGypPath, JSON.stringify(bindingGyp, null, 2));
+  }
+}
+
+/**
+ * Update package.json in JS package
+ */
+async function updateJSPackageJson(jsDir: string): Promise<void> {
+  const projectName = getProjectName();
+  const packageJsonPath = join(jsDir, 'package.json');
+  
+  if (existsSync(packageJsonPath)) {
+    let packageJson = JSON.parse(readFileSync(packageJsonPath, 'utf-8'));
+    
+    // Update name and description
+    packageJson.name = `tree-sitter-${projectName}`;
+    packageJson.description = `Tree-sitter parser for ${projectName}`;
+    
+    // Update keywords
+    if (packageJson.keywords && Array.isArray(packageJson.keywords)) {
+      packageJson.keywords = packageJson.keywords.map((keyword: string) => 
+        keyword === '__DSL_NAME__' ? projectName : keyword
+      );
+    }
+    
+    writeFileSync(packageJsonPath, JSON.stringify(packageJson, null, 2));
+  }
+}
diff --git a/tree-sitter/dsk/dsk-cli/src/commands/dev.ts b/tree-sitter/dsk/dsk-cli/src/commands/dev.ts
new file mode 100644
index 0000000..92b2867
--- /dev/null
+++ b/tree-sitter/dsk/dsk-cli/src/commands/dev.ts
@@ -0,0 +1,105 @@
+/**
+ * Dev Command - Watch grammar.js and rebuild/test on change
+ */
+
+import { Command } from 'commander';
+import chokidar from 'chokidar';
+import chalk from 'chalk';
+import { existsSync } from 'fs';
+import { execa } from 'execa';
+
+/**
+ * Create the dev command
+ */
+export function createDevCommand(): Command {
+  const devCommand = new Command('dev');
+
+  devCommand
+    .description('Watch grammar.js and run build + test on changes')
+    .option('-v, --verbose', 'Show detailed build output')
+    .option('--quiet', 'Suppress non-error logs')
+    .option('--debounce <ms>', 'Debounce delay in milliseconds', '150')
+    .action(async (options) => {
+      if (!existsSync('grammar.js')) {
+        console.error(chalk.red('❌ No grammar.js found. Are you in a DSL project directory?'));
+        process.exit(1);
+      }
+
+      const verbose: boolean = Boolean(options.verbose);
+      const quiet: boolean = Boolean(options.quiet);
+      const debounceMs: number = Number.parseInt(options.debounce, 10) || 150;
+
+      if (!quiet) {
+        console.log(chalk.blue('👀 Watching grammar.js for changes...'));
+      }
+
+      // Initial build + test
+      await runBuildAndTest({ verbose, quiet });
+
+      // Watcher with debounced rebuilds
+      const watcher = chokidar.watch('grammar.js', { ignoreInitial: true });
+      let isRunning = false;
+      let rerunRequested = false;
+      let debounceTimer: NodeJS.Timeout | null = null;
+
+      const runOnce = async () => {
+        if (isRunning) {
+          rerunRequested = true;
+          return;
+        }
+        isRunning = true;
+        if (!quiet) {
+          console.log(chalk.yellow('↻ Change detected. Rebuilding...'));
+        }
+        try {
+          await runBuildAndTest({ verbose, quiet });
+          if (!quiet) {
+            console.log(chalk.green('✅ Rebuild and tests completed.'));
+          }
+        } catch (e) {
+          // Errors already printed by build/test
+        } finally {
+          isRunning = false;
+          if (rerunRequested) {
+            rerunRequested = false;
+            runOnce();
+          }
+        }
+      };
+
+      const debouncedTrigger = () => {
+        if (debounceTimer) clearTimeout(debounceTimer);
+        debounceTimer = setTimeout(runOnce, debounceMs);
+      };
+
+      watcher.on('change', debouncedTrigger);
+    });
+
+  return devCommand;
+}
+
+async function runBuildAndTest(opts: { verbose?: boolean; quiet?: boolean }): Promise<void> {
+  const { verbose, quiet } = opts;
+  if (!quiet) {
+    console.log(chalk.blue('🏗️  Building...'));
+  }
+  try {
+    const buildArgs = ['build', ...(verbose ? ['--verbose'] : [])];
+    await execa('dsk', buildArgs, { stdio: 'inherit' });
+  } catch (error: any) {
+    console.error(chalk.red('❌ Build failed. Fix errors and save again.'));
+    throw error;
+  }
+
+  if (!quiet) {
+    console.log(chalk.blue('🧪 Testing...'));
+  }
+  try {
+    await execa('dsk', ['test'], { stdio: 'inherit' });
+  } catch (error: any) {
+    console.error(chalk.red('❌ Tests failed. Fix tests and save again.'));
+    throw error;
+  }
+}
+
+
diff --git a/tree-sitter/dsk/dsk-cli/src/commands/highlight.ts b/tree-sitter/dsk/dsk-cli/src/commands/highlight.ts
new file mode 100644
index 0000000..fd0d419
--- /dev/null
+++ b/tree-sitter/dsk/dsk-cli/src/commands/highlight.ts
@@ -0,0 +1,141 @@
+/**
+ * Highlight Command - Generate Tree-sitter highlights and editor scaffolds
+ */
+
+import { Command } from 'commander';
+import chalk from 'chalk';
+import { existsSync, mkdirSync, writeFileSync, readFileSync } from 'fs';
+import { join } from 'path';
+
+/**
+ * Create the highlight command
+ */
+export function createHighlightCommand(): Command {
+  const highlightCommand = new Command('highlight');
+
+  highlightCommand
+    .description('Generate Tree-sitter highlights and editor scaffolds (tree-sitter, neovim, emacs, vscode)')
+    .action(async () => {
+      if (!existsSync('grammar.js')) {
+        console.error(chalk.red('❌ No grammar.js found. Are you in a DSL project directory?'));
+        process.exit(1);
+      }
+
+      const languageName = getProjectName();
+
+      // Tree-sitter highlights
+      const tsOutDir = 'generated/editors/tree-sitter';
+      mkdirSync(tsOutDir, { recursive: true });
+      const highlights = `
+; Minimal highlights scaffold. Extend as needed.
+
+; Comments
+(comment) @comment
+
+; Strings
+(string) @string
+
+; Numbers
+(number) @number
+
+; Keywords (example)
+[("if") ("else") ("let") ("function")] @keyword
+`;
+      const tsFile = join(tsOutDir, 'highlights.scm');
+      writeFileSync(tsFile, highlights.trim() + '\n', 'utf-8');
+      console.log(chalk.green(`✅ Generated ${tsFile}`));
+
+      // Neovim instructions
+      const nvimDir = 'generated/editors/neovim';
+      mkdirSync(nvimDir, { recursive: true });
+      const nvimMd = `# Neovim setup for ${languageName}
+
+1. Copy queries to your runtimepath: queries/${languageName}/highlights.scm
+2. Configure nvim-treesitter:
+
+\`\`\`lua
+require('nvim-treesitter.configs').setup({
+  ensure_installed = {},
+  highlight = { enable = true },
+})
+\`\`\`
+`;
+      writeFileSync(join(nvimDir, 'setup-instructions.md'), nvimMd, 'utf-8');
+      console.log(chalk.green(`✅ Generated ${join(nvimDir, 'setup-instructions.md')}`));
+
+      // Emacs major mode (minimal)
+      const emacsDir = 'generated/editors/emacs';
+      mkdirSync(emacsDir, { recursive: true });
+      const emacsEl = `;;; ${languageName}-mode.el --- ${languageName} mode -*- lexical-binding: t; -*-
+
+(require 'treesit)
+
+(define-derived-mode ${languageName}-mode prog-mode "${languageName}"
+  "Major mode for ${languageName} using Tree-sitter."
+  (when (treesit-ready-p '${languageName})
+    (treesit-parser-create '${languageName})))
+
+(add-to-list 'auto-mode-alist '("\\.${languageName}$" . ${languageName}-mode))
+
+(provide '${languageName}-mode)
+`;
+      writeFileSync(join(emacsDir, `${languageName}-mode.el`), emacsEl, 'utf-8');
+      console.log(chalk.green(`✅ Generated ${join(emacsDir, `${languageName}-mode.el`)}`));
+
+      // VS Code: TextMate grammar + language configuration (basic placeholders)
+      const vscodeSyntaxDir = 'generated/editors/vscode/syntaxes';
+      const vscodeDir = 'generated/editors/vscode';
+      mkdirSync(vscodeSyntaxDir, { recursive: true });
+      mkdirSync(vscodeDir, { recursive: true });
+      const scope = `source.${languageName}`;
+      const tmLanguage = {
+        name: languageName,
+        scopeName: scope,
+        patterns: [
+          { include: '#comment' },
+          { include: '#string' },
+          { include: '#number' },
+          { include: '#keyword' }
+        ],
+        repository: {
+          comment: { patterns: [{ name: 'comment.line.double-slash', match: '//.*$' }] },
+          string: { patterns: [{ name: 'string.quoted.double', begin: '"', end: '"' }] },
+          number: { patterns: [{ name: 'constant.numeric', match: '-?\\b[0-9]+(\\.[0-9]+)?\\b' }] },
+          keyword: { patterns: [{ name: 'keyword.control', match: '\\b(if|else|let|function)\\b' }] }
+        }
+      } as const;
+      writeFileSync(join(vscodeSyntaxDir, `${languageName}.tmLanguage.json`), JSON.stringify(tmLanguage, null, 2));
+      const langConfig = {
+        comments: { lineComment: '//' },
+        brackets: [["{","}"],["[","]"],["(",")"]],
+        autoClosingPairs: [
+          { open: '"', close: '"' },
+          { open: '{', close: '}' },
+          { open: '(', close: ')' },
+          { open: '[', close: ']' }
+        ],
+        surroundingPairs: [
+          { open: '"', close: '"' },
+          { open: '{', close: '}' },
+          { open: '(', close: ')' },
+          { open: '[', close: ']' }
+        ]
+      };
+      writeFileSync(join(vscodeDir, 'language-configuration.json'), JSON.stringify(langConfig, null, 2));
+      console.log(chalk.green(`✅ Generated VS Code syntax and configuration`));
+    });
+
+  return highlightCommand;
+}
+
+function getProjectName(): string {
+  try {
+    const grammarContent = readFileSync('grammar.js', 'utf-8');
+    const nameMatch = grammarContent.match(/name:\s*['"]([^'"]+)['"]/);
+    if (nameMatch) return nameMatch[1];
+  } catch {}
+  const parts = process.cwd().split(/[\\/]/);
+  return parts[parts.length - 1] || 'dsl';
+}
+
+
diff --git a/tree-sitter/dsk/dsk-cli/src/commands/new.ts b/tree-sitter/dsk/dsk-cli/src/commands/new.ts
new file mode 100644
index 0000000..1829a21
--- /dev/null
+++ b/tree-sitter/dsk/dsk-cli/src/commands/new.ts
@@ -0,0 +1,485 @@
+/**
+ * New Command - Interactive Grammar Scaffolding
+ * 
+ * Creates a new DSL project with paradigm-aware grammar generation
+ */
+
+import { Command } from 'commander';
+import inquirer from 'inquirer';
+import chalk from 'chalk';
+import { existsSync, mkdirSync, writeFileSync } from 'fs';
+import { join, dirname } from 'path';
+import { fileURLToPath } from 'url';
+import { inferPattern, validatePattern, generateCustomPattern } from '../utils/inference.js';
+import { generateGrammar, generateGrammarFile } from '../utils/grammar-generator.js';
+import { processTemplate } from '../utils/template-processor.js';
+
+// Type definitions for our interactive flow
+
+/** Phase A answers: the language's identity and overall design philosophy. */
+export interface LanguageArchitecture {
+  name: string; // project directory name; also used as the grammar name
+  purpose: string;
+  paradigm: 'functional' | 'object-oriented' | 'procedural' | 'declarative' | 'mixed';
+  dataPhilosophy: 'immutable' | 'mutable' | 'mixed';
+}
+
+/** Phase B answers: feature checklists collected via multi-select prompts. */
+export interface LanguageFeatures {
+  controlFlow: string[];
+  dataStructures: string[];
+  functionTypes: string[];
+}
+
+/** Phase C answers: token patterns (regex sources) plus the user's examples. */
+export interface LanguageSyntax {
+  comments: { type: string; pattern: string }; // pattern is the comment prefix, e.g. "//"
+  identifiers: { pattern: string; examples: string[] };
+  numbers: { pattern: string; examples: string[] };
+  strings: { pattern: string; examples: string[] };
+  variables: { keyword: string; operator: string; terminator: string; example: string };
+  paradigmExamples: { [key: string]: string };
+}
+
+/**
+ * Build the `dsk new` command.
+ *
+ * Supports an interactive scaffolding flow (`--interactive`) and a
+ * template-based basic project creation path.
+ */
+export function createNewCommand(): Command {
+  return new Command('new')
+    .description('Create a new DSL project')
+    .argument('<name>', 'Name of the DSL project')
+    .option('-i, --interactive', 'Use interactive grammar scaffolding')
+    .option('-t, --template <template>', 'Use a specific template (default: basic)')
+    .action(async (projectName: string, opts) => {
+      console.log(chalk.blue('🚀 Creating DSL project:'), chalk.bold(projectName));
+      if (opts.interactive) {
+        await runInteractiveFlow(projectName);
+        return;
+      }
+      await createBasicProject(projectName, opts.template || 'basic');
+    });
+}
+
+/**
+ * Run the interactive grammar scaffolding flow
+ *
+ * Drives the three question phases in order (A: architecture, B: features,
+ * C: syntax), then generates the project on disk. Any error thrown by a
+ * phase or by generation is reported and terminates the process with
+ * exit code 1.
+ *
+ * @param projectName - name of the project directory (and grammar) to create
+ */
+async function runInteractiveFlow(projectName: string): Promise<void> {
+  console.log();
+  console.log(chalk.green('🎯 Welcome to the DSK Grammar Scaffolder!'));
+  console.log(chalk.gray('I\'ll ask you a few questions about your new language.'));
+  console.log(chalk.gray('Just provide examples, and I\'ll build a starter grammar for you.'));
+  console.log();
+
+  try {
+    // Phase A: Language Architecture & Paradigm
+    console.log(chalk.blue('📋 Phase A: Language Architecture & Paradigm'));
+    const architecture = await gatherArchitecture(projectName);
+    console.log(chalk.green('✔'), 'Architecture defined');
+
+    // Phase B: Core Language Features  
+    console.log();
+    console.log(chalk.blue('🔧 Phase B: Core Language Features'));
+    const features = await gatherFeatures(architecture);
+    console.log(chalk.green('✔'), 'Features defined');
+
+    // Phase C: Syntax & Tokens
+    console.log();
+    console.log(chalk.blue('🔤 Phase C: Syntax & Tokens'));
+    const syntax = await gatherSyntax(architecture, features);
+    console.log(chalk.green('✔'), 'Syntax defined');
+
+    // Generate the project (throws if the directory already exists)
+    console.log();
+    console.log(chalk.blue('🏗️  Generating project...'));
+    await generateProject(projectName, architecture, features, syntax);
+    
+    // Success message
+    console.log();
+    console.log(chalk.green('🎉 All done!'));
+    console.log(`Your ${chalk.bold('grammar.js')} has been created with rules for:`);
+    console.log(`  ${chalk.gray('•')} Comments, Identifiers, Numbers, Strings`);
+    console.log(`  ${chalk.gray('•')} Variable Declarations`);
+    console.log(`  ${chalk.gray('•')} ${architecture.paradigm} language constructs`);
+    console.log();
+    console.log(chalk.yellow('To start editing and testing, run:'));
+    console.log(`  ${chalk.cyan('cd')} ${projectName}`);
+    console.log(`  ${chalk.cyan('dsk dev')}`);
+    console.log();
+
+  } catch (error) {
+    console.error(chalk.red('❌ Error during interactive flow:'), error);
+    process.exit(1);
+  }
+}
+
+/**
+ * Phase A: collect the language's purpose, paradigm, and data philosophy.
+ *
+ * @param projectName - recorded as the language's name in the result
+ */
+async function gatherArchitecture(projectName: string): Promise<LanguageArchitecture> {
+  const { purpose } = await inquirer.prompt({
+    type: 'input',
+    name: 'purpose',
+    message: 'What is your language designed for? (e.g., configuration, scripting, domain modeling)',
+    default: 'General purpose scripting'
+  });
+
+  const { paradigm } = await inquirer.prompt({
+    type: 'list',
+    name: 'paradigm',
+    message: 'What programming style does your language follow?',
+    choices: [
+      { name: 'Functional (immutable data, functions as first-class)', value: 'functional' },
+      { name: 'Object-Oriented (classes, inheritance, methods)', value: 'object-oriented' },
+      { name: 'Procedural (step-by-step instructions, functions)', value: 'procedural' },
+      { name: 'Declarative (describe what, not how)', value: 'declarative' },
+      { name: 'Mixed (combination of above)', value: 'mixed' }
+    ],
+    default: 'procedural'
+  });
+
+  const { dataPhilosophy } = await inquirer.prompt({
+    type: 'list',
+    name: 'dataPhilosophy',
+    message: 'How does your language handle data?',
+    choices: [
+      { name: 'Immutable by default (functional style)', value: 'immutable' },
+      { name: 'Mutable variables (imperative style)', value: 'mutable' },
+      { name: 'Mixed approach', value: 'mixed' }
+    ],
+    default: 'mutable'
+  });
+
+  return { name: projectName, purpose, paradigm, dataPhilosophy };
+}
+
+/**
+ * Phase B: collect feature checklists — control flow, data structures, and
+ * function definition styles — via multi-select prompts.
+ */
+async function gatherFeatures(architecture: LanguageArchitecture): Promise<LanguageFeatures> {
+  const { controlFlow } = await inquirer.prompt({
+    type: 'checkbox',
+    name: 'controlFlow',
+    message: 'What control structures does your language support?',
+    choices: [
+      { name: 'Conditionals (if/else)', value: 'conditionals', checked: true },
+      { name: 'Loops (for, while)', value: 'loops' },
+      { name: 'Pattern matching', value: 'pattern_matching' },
+      { name: 'Exception handling (try/catch)', value: 'exceptions' },
+      { name: 'Early returns/breaks', value: 'early_returns' }
+    ]
+  });
+
+  const { dataStructures } = await inquirer.prompt({
+    type: 'checkbox',
+    name: 'dataStructures',
+    message: 'What built-in data structures does your language have?',
+    choices: [
+      { name: 'Arrays/Lists: [1, 2, 3]', value: 'arrays', checked: true },
+      { name: 'Objects/Maps: {key: value}', value: 'objects' },
+      { name: 'Tuples: (a, b, c)', value: 'tuples' },
+      { name: 'Sets: {1, 2, 3}', value: 'sets' }
+    ]
+  });
+
+  const { functionTypes } = await inquirer.prompt({
+    type: 'checkbox',
+    name: 'functionTypes',
+    message: 'How are functions defined in your language?',
+    choices: [
+      { name: 'Named functions: function foo() { ... }', value: 'named', checked: true },
+      { name: 'Anonymous functions: (x) => x + 1', value: 'anonymous' },
+      { name: 'Methods on objects: obj.method()', value: 'methods' },
+      { name: 'First-class functions (can be passed around)', value: 'first_class' }
+    ]
+  });
+
+  return { controlFlow, dataStructures, functionTypes };
+}
+
+/**
+ * Phase C: Gather syntax and token information
+ *
+ * Collects the comment prefix, identifier/number/string token patterns
+ * (via example-based inference), a variable-declaration example, and any
+ * paradigm-specific construct examples.
+ *
+ * @param architecture - Phase A answers; selects which paradigm examples to ask for
+ * @param features - Phase B answers (not read by this function currently)
+ * @returns the assembled LanguageSyntax specification
+ */
+async function gatherSyntax(architecture: LanguageArchitecture, features: LanguageFeatures): Promise<LanguageSyntax> {
+  const syntax: Partial<LanguageSyntax> = {};
+
+  // Comments
+  const commentAnswer = await inquirer.prompt({
+    type: 'input',
+    name: 'comment',
+    message: 'How do you write a single-line comment? (e.g., //, #, --, ;)',
+    default: '//'
+  });
+
+  // Keep only the first whitespace-delimited token the user typed.
+  const commentPrefix = String(commentAnswer.comment).trim().split(/\s+/)[0];
+  syntax.comments = {
+    type: 'line_comment',
+    pattern: commentPrefix
+  };
+
+  // Identifiers with inference
+  syntax.identifiers = await gatherTokenWithInference(
+    'identifier',
+    'Provide 3-5 examples of valid identifiers',
+    'Now provide 2-3 examples of invalid identifiers (optional)',
+    ['myVar', 'userName', '_private']
+  );
+
+  // Numbers with inference
+  syntax.numbers = await gatherTokenWithInference(
+    'number',
+    'Provide examples of numbers in your language',
+    'Provide examples of invalid numbers (optional)',
+    ['42', '3.14', '-17']
+  );
+
+  // Strings with inference (treat input as a single example, not space-split)
+  const stringValid = await inquirer.prompt({
+    type: 'input',
+    name: 'examples',
+    message: 'Provide an example of a string literal',
+    default: '"hello world"'
+  });
+  const stringInvalid = await inquirer.prompt({
+    type: 'input',
+    name: 'examples',
+    message: 'Provide examples of invalid strings (optional)',
+    default: ''
+  });
+  const stringValidExamples = stringValid.examples ? [String(stringValid.examples)] : ['"hello world"'];
+  const stringInvalidExamples = stringInvalid.examples ? [String(stringInvalid.examples)] : [];
+  const stringResult = inferPattern(stringValidExamples, stringInvalidExamples);
+  // Only trust the inferred pattern above the 0.7 confidence threshold;
+  // otherwise fall back to a conservative double-quoted pattern.
+  if (stringResult.pattern && stringResult.confidence > 0.7) {
+    syntax.strings = { pattern: stringResult.pattern.regex.source, examples: stringValidExamples };
+  } else {
+    syntax.strings = { pattern: '"[^"]*"', examples: stringValidExamples };
+  }
+
+  // Variable declarations
+  const varAnswer = await inquirer.prompt({
+    type: 'input',
+    name: 'example',
+    message: 'Show me how you declare a variable x with value 42 (helps identify keywords)',
+    default: 'let x = 42;'
+  });
+
+  const varParts = parseVariableDeclaration(varAnswer.example);
+  syntax.variables = varParts;
+
+  // Paradigm-specific examples
+  syntax.paradigmExamples = {};
+  
+  if (architecture.paradigm === 'object-oriented' || architecture.paradigm === 'mixed') {
+    const classAnswer = await inquirer.prompt({
+      type: 'input',
+      name: 'classExample',
+      message: 'Show me how you define a class with a method',
+      default: 'class Person { getName() { return this.name; } }'
+    });
+    syntax.paradigmExamples.class = classAnswer.classExample;
+  }
+
+  if (architecture.paradigm === 'functional' || architecture.paradigm === 'mixed') {
+    const funcAnswer = await inquirer.prompt({
+      type: 'input',
+      name: 'funcExample', 
+      message: 'Show me how you define and call a function',
+      default: 'function add(a, b) { return a + b; }'
+    });
+    syntax.paradigmExamples.function = funcAnswer.funcExample;
+  }
+
+  if (architecture.paradigm === 'declarative') {
+    const ruleAnswer = await inquirer.prompt({
+      type: 'input',
+      name: 'ruleExample',
+      message: 'Show me a typical declaration/rule in your language',
+      default: 'rule user_can_edit when user.role == "admin"'
+    });
+    syntax.paradigmExamples.rule = ruleAnswer.ruleExample;
+  }
+
+  // Cast is safe here: every required LanguageSyntax field was assigned above.
+  return syntax as LanguageSyntax;
+}
+
+/**
+ * Gather token information with automatic pattern inference
+ *
+ * Prompts for valid (and optionally invalid) examples, runs inferPattern,
+ * and asks the user to confirm the inferred regex. When inference fails or
+ * the user rejects the result, offers three fallbacks: enter a custom regex
+ * (validated against the examples), generate a simple pattern, or retry
+ * with new examples (implemented via recursion).
+ *
+ * @param tokenType - label used in prompts (e.g. 'identifier', 'number')
+ * @param validPrompt - prompt text asking for valid examples
+ * @param invalidPrompt - prompt text asking for invalid examples
+ * @param defaultExamples - defaults pre-filled into the valid-examples prompt
+ * @returns the chosen regex source plus the user's valid examples
+ */
+async function gatherTokenWithInference(
+  tokenType: string,
+  validPrompt: string,
+  invalidPrompt: string,
+  defaultExamples: string[]
+): Promise<{ pattern: string; examples: string[] }> {
+  
+  // Get valid examples
+  const validAnswer = await inquirer.prompt({
+    type: 'input',
+    name: 'examples',
+    message: `${validPrompt} (separate with spaces)`,
+    default: defaultExamples.join(' ')
+  });
+
+  const validExamples = validAnswer.examples.split(/\s+/).filter((ex: string) => ex.length > 0);
+
+  // Get invalid examples (optional)
+  const invalidAnswer = await inquirer.prompt({
+    type: 'input',
+    name: 'examples',
+    message: `${invalidPrompt} (separate with spaces, or press Enter to skip)`,
+    default: ''
+  });
+
+  const invalidExamples = invalidAnswer.examples 
+    ? invalidAnswer.examples.split(/\s+/).filter((ex: string) => ex.length > 0)
+    : [];
+
+  // Try inference
+  const result = inferPattern(validExamples, invalidExamples);
+  
+  if (result.pattern && result.confidence > 0.7) {
+    // Successful inference - confirm with user
+    const confirmAnswer = await inquirer.prompt({
+      type: 'confirm',
+      name: 'confirmed',
+      message: `I've inferred the pattern for ${tokenType} as: ${chalk.cyan(result.pattern.regex.source)}. Does this look correct?`,
+      default: true
+    });
+
+    if (confirmAnswer.confirmed) {
+      return {
+        pattern: result.pattern.regex.source,
+        examples: validExamples
+      };
+    }
+  }
+
+  // Inference failed or user rejected - offer alternatives
+  console.log(chalk.yellow(`I couldn't determine a reliable pattern from those examples.`));
+  
+  const fallbackAnswer = await inquirer.prompt({
+    type: 'list',
+    name: 'option',
+    message: 'How would you like to proceed?',
+    choices: [
+      { name: 'Provide a custom regular expression', value: 'custom_regex' },
+      { name: 'Generate a simple pattern from examples', value: 'simple_pattern' },
+      { name: 'Try different examples', value: 'retry' }
+    ]
+  });
+
+  if (fallbackAnswer.option === 'custom_regex') {
+    const regexAnswer = await inquirer.prompt({
+      type: 'input',
+      name: 'regex',
+      // Rejects the regex unless validatePattern accepts it against both
+      // the valid and invalid example sets.
+      message: `Enter a regular expression for ${tokenType}:`,
+      validate: (input: string) => {
+        const validation = validatePattern(input, validExamples, invalidExamples);
+        return validation.isValid || validation.errors.join(', ');
+      }
+    });
+    
+    return {
+      pattern: regexAnswer.regex,
+      examples: validExamples
+    };
+  }
+
+  if (fallbackAnswer.option === 'simple_pattern') {
+    const simplePattern = generateCustomPattern(validExamples, invalidExamples);
+    return {
+      pattern: simplePattern,
+      examples: validExamples
+    };
+  }
+
+  // Retry with different examples
+  return await gatherTokenWithInference(tokenType, validPrompt, invalidPrompt, defaultExamples);
+}
+
+/**
+ * Parse a variable-declaration example to extract its keyword, assignment
+ * operator, and statement terminator.
+ *
+ * Recognizes two shapes:
+ *   - `let x = 42;`  (declaration keyword + name + operator)
+ *   - `x = 42;`      (bare name + operator, no declaration keyword)
+ * and falls back to `let` / `=` / `;` when the example matches neither.
+ */
+function parseVariableDeclaration(example: string): { keyword: string; operator: string; terminator: string; example: string } {
+  // Shape 1: keyword name = value;  groups: 1=keyword 2=name 3=operator 4=terminator
+  const keyworded = example.match(/^(\w+)\s+(\w+)\s*([=:])\s*[^;]*(;?)/);
+  if (keyworded) {
+    return {
+      keyword: keyworded[1],
+      operator: keyworded[3] || '=',
+      terminator: keyworded[4] || ';',
+      example
+    };
+  }
+
+  // Shape 2: name = value;  groups: 1=name 2=operator 3=terminator
+  // BUG FIX: the original looped over both patterns with shape-1 group
+  // indices, so for this shape it reported the variable name as the keyword
+  // and the terminator as the operator. Index per-pattern instead.
+  const bare = example.match(/^(\w+)\s*([=:])\s*[^;]*(;?)/);
+  if (bare) {
+    return {
+      keyword: '',
+      operator: bare[2] || '=',
+      terminator: bare[3] || ';',
+      example
+    };
+  }
+
+  // Fallback: sensible defaults when the example is unparseable.
+  return {
+    keyword: 'let',
+    operator: '=',
+    terminator: ';',
+    example
+  };
+}
+
+/**
+ * Generate the project with collected information
+ *
+ * Creates the project directory, writes grammar.js from the collected
+ * specifications, and processes the bundled default template into the new
+ * directory. Throws if the target directory already exists.
+ *
+ * @param name - project directory to create (relative to cwd)
+ * @param architecture - Phase A answers
+ * @param features - Phase B answers
+ * @param syntax - Phase C answers
+ */
+async function generateProject(
+  name: string,
+  architecture: LanguageArchitecture,
+  features: LanguageFeatures,
+  syntax: LanguageSyntax
+): Promise<void> {
+  // Create project directory
+  if (existsSync(name)) {
+    throw new Error(`Directory ${name} already exists`);
+  }
+
+  mkdirSync(name, { recursive: true });
+
+  // Generate grammar.js file
+  console.log(chalk.gray('🔧 Generating grammar.js...'));
+  const grammar = generateGrammar(architecture, features, syntax);
+  const grammarContent = generateGrammarFile(grammar);
+  writeFileSync(join(name, 'grammar.js'), grammarContent, 'utf-8');
+
+  // Process template files
+  console.log(chalk.gray('📋 Creating project structure...'));
+  // Resolve the template directory relative to this module's own location,
+  // not the user's cwd.
+  const __filename = fileURLToPath(import.meta.url);
+  const __dirname = dirname(__filename);
+  const templateDir = join(__dirname, '..', '..', 'templates', 'default');
+  
+  const templateContext = {
+    architecture,
+    features,
+    syntax
+  };
+  
+  processTemplate(templateDir, name, templateContext);
+  
+  console.log(chalk.gray(`📁 Created project directory: ${name}/`));
+  console.log(chalk.green('✨ Project generated successfully!'));
+}
+
+/**
+ * Create a basic project without interactive flow.
+ * Currently a placeholder: only announces that the feature is coming.
+ */
+async function createBasicProject(name: string, template: string): Promise<void> {
+  const notice = `🚧 Basic project creation (template: ${template}) coming soon!`;
+  console.log(chalk.yellow(notice));
+}
diff --git a/tree-sitter/dsk/dsk-cli/src/commands/package.ts b/tree-sitter/dsk/dsk-cli/src/commands/package.ts
new file mode 100644
index 0000000..515c2ce
--- /dev/null
+++ b/tree-sitter/dsk/dsk-cli/src/commands/package.ts
@@ -0,0 +1,66 @@
+/**
+ * Package Command - Create distributable artifacts for C and JS outputs
+ */
+
+import { Command } from 'commander';
+import chalk from 'chalk';
+import { existsSync, mkdirSync, writeFileSync, readFileSync } from 'fs';
+import { join } from 'path';
+import { execa } from 'execa';
+
+/**
+ * Create the package command
+ *
+ * Rebuilds the project via `dsk build`, then archives generated/c with the
+ * system `tar` and packs generated/js with `bun pack` (falling back to
+ * `npm pack`), placing all artifacts under dist/.
+ */
+export function createPackageCommand(): Command {
+  const pkgCommand = new Command('package');
+
+  pkgCommand
+    .description('Package C and JS artifacts into distributable archives')
+    .action(async () => {
+      // Must run inside a DSL project (identified by grammar.js in cwd).
+      if (!existsSync('grammar.js')) {
+        console.error(chalk.red('❌ No grammar.js found. Are you in a DSL project directory?'));
+        process.exit(1);
+      }
+
+      console.log(chalk.blue('📦 Packaging artifacts...'));
+
+      // Ensure build is up to date
+      await execa('dsk', ['build'], { stdio: 'inherit' });
+
+      const distDir = 'dist';
+      mkdirSync(distDir, { recursive: true });
+
+      // Zip C outputs (simple tar.gz using system tar to avoid extra deps)
+      const cDir = 'generated/c';
+      if (existsSync(cDir)) {
+        const cArchive = join(distDir, 'c-artifacts.tar.gz');
+        await execa('tar', ['-czf', cArchive, '-C', 'generated', 'c'], { stdio: 'inherit' });
+        console.log(chalk.green(`✅ C artifacts: ${cArchive}`));
+      }
+
+      // Pack JS package
+      const jsDir = 'generated/js';
+      if (existsSync(jsDir)) {
+        // Prefer bun pack, fallback to npm pack
+        let pkgPath = '';
+        try {
+          const { stdout } = await execa('bun', ['pack'], { cwd: jsDir });
+          pkgPath = stdout.trim();
+        } catch {
+          const { stdout } = await execa('npm', ['pack'], { cwd: jsDir });
+          pkgPath = stdout.trim();
+        }
+        // pack output may span several tokens/lines; last token is the filename.
+        const fileName = pkgPath.split(/\s|\n/).pop() as string;
+        const srcPath = join(jsDir, fileName);
+        const destPath = join(distDir, fileName);
+        // NOTE(review): copying via `bash -lc cp` makes this unix-only —
+        // confirm whether Windows support is required before release.
+        await execa('bash', ['-lc', `cp ${JSON.stringify(srcPath)} ${JSON.stringify(destPath)}`]);
+        console.log(chalk.green(`✅ JS package: ${destPath}`));
+      }
+
+      console.log(chalk.blue('🎉 Packaging complete.'));
+    });
+
+  return pkgCommand;
+}
+
+
diff --git a/tree-sitter/dsk/dsk-cli/src/commands/self.ts b/tree-sitter/dsk/dsk-cli/src/commands/self.ts
new file mode 100644
index 0000000..2b43fd6
--- /dev/null
+++ b/tree-sitter/dsk/dsk-cli/src/commands/self.ts
@@ -0,0 +1,71 @@
+/**
+ * Self Packaging Command - Build and package the dsk CLI for distribution
+ */
+
+import { Command } from 'commander';
+import chalk from 'chalk';
+import { execa } from 'execa';
+import { existsSync, mkdirSync, readFileSync } from 'fs';
+import { dirname, join } from 'path';
+import { fileURLToPath } from 'url';
+
+/**
+ * Create the self:package command
+ *
+ * Builds the TypeScript sources (via `bun x tsc`) and packs the CLI into a
+ * .tgz with `bun pack` (falling back to `npm pack`), moving the result into
+ * a release/ directory under the project root.
+ */
+export function createSelfPackageCommand(): Command {
+  const cmd = new Command('self:package');
+
+  cmd
+    .description('Build and package the dsk CLI into a .tgz for distribution')
+    .option('-v, --verbose', 'Show detailed build output')
+    .action(async (options) => {
+      const projectRoot = resolveProjectRoot();
+      if (!existsSync(join(projectRoot, 'package.json'))) {
+        console.error(chalk.red('❌ Could not locate package.json for dsk-cli'));
+        process.exit(1);
+      }
+
+      const pkg = JSON.parse(readFileSync(join(projectRoot, 'package.json'), 'utf-8'));
+      console.log(chalk.blue(`📦 Packaging ${pkg.name}@${pkg.version}`));
+
+      // 1) Build TypeScript → dist
+      console.log(chalk.blue('🏗️  Building CLI...'));
+      // BUG FIX: both branches previously passed 'inherit', so --verbose had
+      // no effect. Only stream compiler output when verbose was requested;
+      // build failures still surface because execa rejects on non-zero exit.
+      await execa('bun', ['x', 'tsc'], { cwd: projectRoot, stdio: options.verbose ? 'inherit' : 'pipe' });
+
+      // 2) Pack npm tarball using bun pack (fallback to npm pack)
+      console.log(chalk.blue('🧰 Creating package tarball...'));
+      let tgzName = '';
+      try {
+        const { stdout } = await execa('bun', ['pack'], { cwd: projectRoot });
+        tgzName = stdout.trim().split(/\s|\n/).pop() || '';
+      } catch {
+        const { stdout } = await execa('npm', ['pack'], { cwd: projectRoot });
+        tgzName = stdout.trim().split(/\s|\n/).pop() || '';
+      }
+
+      if (!tgzName) {
+        console.error(chalk.red('❌ Failed to determine generated package filename'));
+        process.exit(1);
+      }
+
+      const releaseDir = join(projectRoot, 'release');
+      mkdirSync(releaseDir, { recursive: true });
+      const src = join(projectRoot, tgzName);
+      const dest = join(releaseDir, tgzName);
+      // mv -f overwrites any previous tarball of the same version.
+      await execa('bash', ['-lc', `mv -f ${JSON.stringify(src)} ${JSON.stringify(dest)}`]);
+
+      console.log(chalk.green(`✅ Created ${dest}`));
+    });
+
+  return cmd;
+}
+
+function resolveProjectRoot(): string {
+  // Compiled output lives in dist/commands, so the package root sits two
+  // directories above this module.
+  const moduleDir = dirname(fileURLToPath(import.meta.url));
+  return join(moduleDir, '..', '..');
+}
+
+
diff --git a/tree-sitter/dsk/dsk-cli/src/commands/test.ts b/tree-sitter/dsk/dsk-cli/src/commands/test.ts
new file mode 100644
index 0000000..694acc7
--- /dev/null
+++ b/tree-sitter/dsk/dsk-cli/src/commands/test.ts
@@ -0,0 +1,50 @@
+/**
+ * Test Command - Wrapper around `tree-sitter test`
+ *
+ * Streams test output directly to the console.
+ */
+
+import { Command } from 'commander';
+import { execa } from 'execa';
+import chalk from 'chalk';
+import { existsSync } from 'fs';
+
+/**
+ * Build the `dsk test` command — a thin wrapper around `tree-sitter test`
+ * that streams the runner's output straight to the console.
+ */
+export function createTestCommand(): Command {
+  return new Command('test')
+    .description('Run tree-sitter tests and stream output')
+    .allowExcessArguments(true)
+    .option('-u, --update', 'Update expected outputs (snapshots)')
+    .option('-f, --filter <regex>', 'Only run tests whose descriptions match the regex')
+    .option('--cwd <dir>', 'Run tests with a different working directory')
+    .option('-v, --verbose', 'Show verbose output (passes through to tree-sitter)')
+    .argument('[patterns...]', 'Optional test patterns (filenames or test names)')
+    .action(async (patterns: string[], options) => {
+      // Must run inside a DSL project (identified by grammar.js in cwd).
+      if (!existsSync('grammar.js')) {
+        console.error(chalk.red('❌ No grammar.js found. Are you in a DSL project directory?'));
+        process.exit(1);
+      }
+
+      try {
+        console.log(chalk.blue('🧪 Running tests...'));
+        // Translate our flags into tree-sitter's CLI arguments.
+        const args: string[] = ['test'];
+        if (options.update) {
+          args.push('-u');
+        }
+        if (options.filter) {
+          args.push('-f', String(options.filter));
+        }
+        if (options.verbose) {
+          args.push('--verbose');
+        }
+        args.push(...patterns);
+        const workingDir = options.cwd || process.cwd();
+        await execa('tree-sitter', args, { stdio: 'inherit', cwd: workingDir });
+      } catch (error: any) {
+        console.error(chalk.red('❌ Tests failed:'), error?.message || error || 'Unknown error');
+        process.exit(1);
+      }
+    });
+}
+
+
diff --git a/tree-sitter/dsk/dsk-cli/src/index.ts b/tree-sitter/dsk/dsk-cli/src/index.ts
new file mode 100644
index 0000000..42c75c5
--- /dev/null
+++ b/tree-sitter/dsk/dsk-cli/src/index.ts
@@ -0,0 +1,80 @@
+#!/usr/bin/env node
+
+/**
+ * DSK - DSL Development Kit
+ * Command-line tool for creating Domain-Specific Languages with Tree-sitter
+ */
+
+import { Command } from 'commander';
+import chalk from 'chalk';
+import { readFileSync } from 'fs';
+import { fileURLToPath } from 'url';
+import { dirname, join } from 'path';
+
+// Subcommand factories, grouped here so the registration list below stays
+// easy to audit. (A stale commented-out "TODO: Import remaining commands"
+// block was removed — every command is imported and registered.)
+import { createNewCommand } from './commands/new.js';
+import { createBuildCommand } from './commands/build.js';
+import { createTestCommand } from './commands/test.js';
+import { createDevCommand } from './commands/dev.js';
+import { createHighlightCommand } from './commands/highlight.js';
+import { createPackageCommand } from './commands/package.js';
+import { createSelfPackageCommand } from './commands/self.js';
+
+// Read our own package.json so --version always reports the installed version.
+const __filename = fileURLToPath(import.meta.url);
+const __dirname = dirname(__filename);
+const packagePath = join(__dirname, '..', 'package.json');
+const packageJson = JSON.parse(readFileSync(packagePath, 'utf-8'));
+
+const program = new Command();
+
+// Configure main command
+program
+  .name('dsk')
+  .description('DSL Development Kit - Streamline Domain-Specific Language creation')
+  .version(packageJson.version);
+
+// Welcome message for help
+program.on('--help', () => {
+  console.log('');
+  console.log(chalk.blue('Getting Started:'));
+  console.log('  $ dsk new my-language --interactive');
+  console.log('  $ cd my-language');
+  console.log('  $ dsk dev');
+  console.log('');
+  console.log(chalk.yellow('Learn more at: https://github.com/your-org/dsk'));
+});
+
+// Register every subcommand.
+program.addCommand(createNewCommand());
+program.addCommand(createBuildCommand());
+program.addCommand(createTestCommand());
+program.addCommand(createDevCommand());
+program.addCommand(createHighlightCommand());
+program.addCommand(createPackageCommand());
+program.addCommand(createSelfPackageCommand());
+
+// Parse command line arguments
+program.parse();
diff --git a/tree-sitter/dsk/dsk-cli/src/utils/grammar-generator.ts b/tree-sitter/dsk/dsk-cli/src/utils/grammar-generator.ts
new file mode 100644
index 0000000..1b40eff
--- /dev/null
+++ b/tree-sitter/dsk/dsk-cli/src/utils/grammar-generator.ts
@@ -0,0 +1,466 @@
+/**
+ * Grammar Generator
+ * 
+ * Converts collected user information into Tree-sitter grammar.js files
+ * with paradigm-aware rule generation
+ */
+
+import { LanguageArchitecture, LanguageFeatures, LanguageSyntax } from '../commands/new.js';
+
/** One named rule destined for the generated grammar.js. */
export interface GrammarRule {
  name: string;        // rule name as it will appear in grammar.js
  definition: string;  // Tree-sitter DSL expression, e.g. 'seq(...)' or '/regex/'
  comment?: string;    // optional note emitted as a // comment above the rule
}

/** Full description of a generated Tree-sitter grammar, ready for rendering. */
export interface GeneratedGrammar {
  name: string;            // value of the grammar({ name: ... }) field
  rules: GrammarRule[];    // ordered rules; the root rule is generated first
  extras: string[];        // expressions allowed between any tokens (whitespace, comments)
  conflicts: string[][];   // conflict groups of '$.rule' refs (currently always empty)
  precedences: string[][]; // precedence groups of '$.rule' refs
  word?: string;           // name of the rule used as the word token
}
+
+/**
+ * Generate a complete Tree-sitter grammar from user specifications
+ */
+export function generateGrammar(
+  architecture: LanguageArchitecture,
+  features: LanguageFeatures,
+  syntax: LanguageSyntax
+): GeneratedGrammar {
+  const rules: GrammarRule[] = [];
+  const extras: string[] = [];
+  
+  // Start with the root rule - this varies by paradigm
+  rules.push(generateRootRule(architecture, features));
+  
+  // Add basic token rules
+  rules.push(...generateTokenRules(syntax));
+  
+  // Add paradigm-specific rules
+  rules.push(...generateParadigmRules(architecture, features, syntax));
+  
+  // Add data structure rules if specified
+  if (features.dataStructures.length > 0) {
+    rules.push(...generateDataStructureRules(features.dataStructures));
+  }
+  
+  // Add control flow rules if specified
+  if (features.controlFlow.length > 0) {
+    rules.push(...generateControlFlowRules(features.controlFlow, syntax));
+  }
+  
+  // Set up extras (whitespace and comments)
+  extras.push('/\\s/', `$.${getCommentRuleName(syntax.comments.pattern)}`);
+  
+  return {
+    name: architecture.name,
+    rules,
+    extras,
+    conflicts: [], // TODO: Add conflicts if needed
+    precedences: generatePrecedences(architecture, features),
+    word: 'identifier' // Most languages use identifier as word token
+  };
+}
+
+/**
+ * Generate the root rule based on language paradigm
+ */
+function generateRootRule(architecture: LanguageArchitecture, features: LanguageFeatures): GrammarRule {
+  let definition: string;
+  
+  switch (architecture.paradigm) {
+    case 'declarative':
+      definition = 'repeat(choice($.rule_declaration, $.constraint, $.fact))';
+      break;
+    case 'functional':
+      definition = 'repeat(choice($.function_definition, $.expression, $.binding))';
+      break;
+    case 'object-oriented':
+      definition = 'repeat(choice($.class_definition, $.statement, $.expression))';
+      break;
+    case 'procedural':
+    case 'mixed':
+    default:
+      definition = 'repeat(choice($.statement, $.expression, $.declaration))';
+      break;
+  }
+  
+  return {
+    name: 'source_file',
+    definition,
+    comment: `Root rule for ${architecture.paradigm} language`
+  };
+}
+
+/**
+ * Generate basic token rules (identifiers, numbers, strings, comments)
+ */
+function generateTokenRules(syntax: LanguageSyntax): GrammarRule[] {
+  const rules: GrammarRule[] = [];
+  
+  // Identifier
+  rules.push({
+    name: 'identifier',
+    definition: `/${syntax.identifiers.pattern}/`,
+    comment: `Identifiers: ${syntax.identifiers.examples.join(', ')}`
+  });
+  
+  // Numbers
+  rules.push({
+    name: 'number',
+    definition: `/${syntax.numbers.pattern}/`,
+    comment: `Numbers: ${syntax.numbers.examples.join(', ')}`
+  });
+  
+  // Strings
+  rules.push({
+    name: 'string',
+    definition: `/${syntax.strings.pattern}/`,
+    comment: `Strings: ${syntax.strings.examples.join(', ')}`
+  });
+  
+  // Comments
+  const commentRuleName = getCommentRuleName(syntax.comments.pattern);
+  rules.push({
+    name: commentRuleName,
+    definition: `/${escapeRegex(syntax.comments.pattern)}.*$/`,
+    comment: `Line comments starting with ${syntax.comments.pattern}`
+  });
+  
+  return rules;
+}
+
+/**
+ * Generate paradigm-specific rules
+ */
+function generateParadigmRules(
+  architecture: LanguageArchitecture,
+  features: LanguageFeatures,
+  syntax: LanguageSyntax
+): GrammarRule[] {
+  const rules: GrammarRule[] = [];
+  
+  // Variable declarations (common to most paradigms)
+  rules.push({
+    name: 'variable_declaration',
+    definition: `seq("${syntax.variables.keyword}", $.identifier, "${syntax.variables.operator}", $.expression, "${syntax.variables.terminator}")`,
+    comment: `Variable declarations: ${syntax.variables.example}`
+  });
+  
+  // Expression rule (fundamental to all paradigms)
+  rules.push(generateExpressionRule(architecture, features));
+  
+  // Statement rule (for imperative paradigms)
+  if (['procedural', 'object-oriented', 'mixed'].includes(architecture.paradigm)) {
+    rules.push(generateStatementRule(architecture, features));
+  }
+  
+  // Add paradigm-specific constructs
+  switch (architecture.paradigm) {
+    case 'object-oriented':
+      if (syntax.paradigmExamples.class) {
+        rules.push(generateClassRule(syntax.paradigmExamples.class));
+      }
+      break;
+    case 'functional':
+      if (syntax.paradigmExamples.function) {
+        rules.push(generateFunctionRule(syntax.paradigmExamples.function, features.functionTypes));
+      }
+      break;
+    case 'declarative':
+      if (syntax.paradigmExamples.rule) {
+        rules.push(generateDeclarativeRule(syntax.paradigmExamples.rule));
+      }
+      break;
+  }
+  
+  return rules;
+}
+
+/**
+ * Generate expression rule based on paradigm
+ */
+function generateExpressionRule(architecture: LanguageArchitecture, features: LanguageFeatures): GrammarRule {
+  const choices = [
+    '$.identifier',
+    '$.number', 
+    '$.string',
+    '$.parenthesized_expression'
+  ];
+  
+  // Add function calls if functions are supported
+  if (features.functionTypes.length > 0) {
+    choices.push('$.function_call');
+  }
+  
+  // Add data structure literals
+  if (features.dataStructures.includes('arrays')) {
+    choices.push('$.array_literal');
+  }
+  if (features.dataStructures.includes('objects')) {
+    choices.push('$.object_literal');
+  }
+  
+  // Add binary operations for most paradigms
+  if (architecture.paradigm !== 'declarative') {
+    choices.push('$.binary_expression');
+  }
+  
+  return {
+    name: 'expression',
+    definition: `choice(${choices.join(', ')})`,
+    comment: 'Expression rule covering all expression types'
+  };
+}
+
+/**
+ * Generate statement rule for imperative paradigms
+ */
+function generateStatementRule(architecture: LanguageArchitecture, features: LanguageFeatures): GrammarRule {
+  const choices = [
+    '$.variable_declaration',
+    '$.expression_statement'
+  ];
+  
+  // Add control flow statements
+  if (features.controlFlow.includes('conditionals')) {
+    choices.push('$.if_statement');
+  }
+  if (features.controlFlow.includes('loops')) {
+    choices.push('$.for_statement', '$.while_statement');
+  }
+  
+  return {
+    name: 'statement',
+    definition: `choice(${choices.join(', ')})`,
+    comment: 'Statement rule for imperative constructs'
+  };
+}
+
+/**
+ * Generate data structure rules
+ */
+function generateDataStructureRules(dataStructures: string[]): GrammarRule[] {
+  const rules: GrammarRule[] = [];
+  
+  if (dataStructures.includes('arrays')) {
+    rules.push({
+      name: 'array_literal',
+      definition: 'seq("[", optional(seq($.expression, repeat(seq(",", $.expression)))), "]")',
+      comment: 'Array literals: [1, 2, 3]'
+    });
+  }
+  
+  if (dataStructures.includes('objects')) {
+    rules.push({
+      name: 'object_literal',
+      definition: 'seq("{", optional(seq($.property, repeat(seq(",", $.property)))), "}")',
+      comment: 'Object literals: {key: value}'
+    });
+    
+    rules.push({
+      name: 'property',
+      definition: 'seq(choice($.identifier, $.string), ":", $.expression)',
+      comment: 'Object property: key: value'
+    });
+  }
+  
+  if (dataStructures.includes('tuples')) {
+    rules.push({
+      name: 'tuple_literal',
+      definition: 'seq("(", $.expression, repeat1(seq(",", $.expression)), ")")',
+      comment: 'Tuple literals: (a, b, c)'
+    });
+  }
+  
+  return rules;
+}
+
+/**
+ * Generate control flow rules
+ */
+function generateControlFlowRules(controlFlow: string[], syntax: LanguageSyntax): GrammarRule[] {
+  const rules: GrammarRule[] = [];
+  
+  if (controlFlow.includes('conditionals')) {
+    rules.push({
+      name: 'if_statement',
+      definition: 'seq("if", "(", $.expression, ")", $.block, optional(seq("else", choice($.if_statement, $.block))))',
+      comment: 'If-else statements'
+    });
+    
+    rules.push({
+      name: 'block',
+      definition: 'seq("{", repeat($.statement), "}")',
+      comment: 'Code blocks'
+    });
+  }
+  
+  if (controlFlow.includes('loops')) {
+    rules.push({
+      name: 'while_statement',
+      definition: 'seq("while", "(", $.expression, ")", $.block)',
+      comment: 'While loops'
+    });
+    
+    rules.push({
+      name: 'for_statement',
+      definition: 'seq("for", "(", optional($.statement), ";", optional($.expression), ";", optional($.expression), ")", $.block)',
+      comment: 'For loops'
+    });
+  }
+  
+  return rules;
+}
+
+/**
+ * Generate class rule from user example
+ */
+function generateClassRule(classExample: string): GrammarRule {
+  // Simple class rule - could be enhanced with more parsing
+  return {
+    name: 'class_definition',
+    definition: 'seq("class", $.identifier, "{", repeat($.method_definition), "}")',
+    comment: `Class definition based on: ${classExample}`
+  };
+}
+
+/**
+ * Generate function rule from user example
+ */
+function generateFunctionRule(functionExample: string, functionTypes: string[]): GrammarRule {
+  let definition = 'seq("function", $.identifier, "(", optional($.parameter_list), ")", $.block)';
+  
+  // Add arrow functions if supported
+  if (functionTypes.includes('anonymous')) {
+    definition = `choice(${definition}, $.arrow_function)`;
+  }
+  
+  return {
+    name: 'function_definition',
+    definition,
+    comment: `Function definition based on: ${functionExample}`
+  };
+}
+
+/**
+ * Generate declarative rule from user example
+ */
+function generateDeclarativeRule(ruleExample: string): GrammarRule {
+  return {
+    name: 'rule_declaration',
+    definition: 'seq("rule", $.identifier, optional(seq("when", $.expression)))',
+    comment: `Rule declaration based on: ${ruleExample}`
+  };
+}
+
+/**
+ * Generate precedences based on paradigm
+ */
+function generatePrecedences(architecture: LanguageArchitecture, features: LanguageFeatures): string[][] {
+  // Basic precedence for binary operations
+  const precedences = [
+    ['$.binary_expression']
+  ];
+  
+  // Add function call precedence if functions are supported
+  if (features.functionTypes.length > 0) {
+    precedences.push(['$.function_call']);
+  }
+  
+  return precedences;
+}
+
+/**
+ * Generate the complete grammar.js file content
+ */
+export function generateGrammarFile(grammar: GeneratedGrammar): string {
+  const lines: string[] = [];
+  
+  lines.push('/**');
+  lines.push(` * Grammar for ${grammar.name}`);
+  lines.push(' * Generated by DSK (DSL Development Kit)');
+  lines.push(' */');
+  lines.push('');
+  lines.push('module.exports = grammar({');
+  lines.push(`  name: '${grammar.name}',`);
+  lines.push('');
+  
+  // Add word token if specified
+  if (grammar.word) {
+    lines.push(`  word: $ => $.${grammar.word},`);
+    lines.push('');
+  }
+  
+  // Add rules
+  lines.push('  rules: {');
+  
+  grammar.rules.forEach((rule, index) => {
+    if (rule.comment) {
+      lines.push(`    // ${rule.comment}`);
+    }
+    lines.push(`    ${rule.name}: $ => ${rule.definition}${index < grammar.rules.length - 1 ? ',' : ''}`);
+    if (index < grammar.rules.length - 1) {
+      lines.push('');
+    }
+  });
+  
+  lines.push('  }');
+  
+  // Add extras
+  if (grammar.extras.length > 0) {
+    lines.push(',');
+    lines.push('');
+    lines.push('  extras: $ => [');
+    grammar.extras.forEach((extra, index) => {
+      lines.push(`    ${extra}${index < grammar.extras.length - 1 ? ',' : ''}`);
+    });
+    lines.push('  ]');
+  }
+  
+  // Add conflicts if any
+  if (grammar.conflicts.length > 0) {
+    lines.push(',');
+    lines.push('');
+    lines.push('  conflicts: $ => [');
+    grammar.conflicts.forEach((conflict, index) => {
+      lines.push(`    [${conflict.join(', ')}]${index < grammar.conflicts.length - 1 ? ',' : ''}`);
+    });
+    lines.push('  ]');
+  }
+  
+  // Add precedences if any
+  if (grammar.precedences.length > 0) {
+    lines.push(',');
+    lines.push('');
+    lines.push('  precedences: $ => [');
+    grammar.precedences.forEach((prec, index) => {
+      lines.push(`    [${prec.join(', ')}]${index < grammar.precedences.length - 1 ? ',' : ''}`);
+    });
+    lines.push('  ]');
+  }
+  
+  lines.push('});');
+  lines.push('');
+  
+  return lines.join('\n');
+}
+
+/**
+ * Helper functions
+ */
+function getCommentRuleName(commentPattern: string): string {
+  switch (commentPattern) {
+    case '//': return 'line_comment_slash';
+    case '#': return 'line_comment_hash';
+    case ';': return 'line_comment_semicolon';
+    default: return 'line_comment';
+  }
+}
+
+function escapeRegex(pattern: string): string {
+  return pattern.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
+}
diff --git a/tree-sitter/dsk/dsk-cli/src/utils/inference.ts b/tree-sitter/dsk/dsk-cli/src/utils/inference.ts
new file mode 100644
index 0000000..f49e176
--- /dev/null
+++ b/tree-sitter/dsk/dsk-cli/src/utils/inference.ts
@@ -0,0 +1,286 @@
+/**
+ * Pattern Inference Engine
+ * 
+ * Infers regular expression patterns from user examples.
+ * Uses an extensible library of common token patterns with solid defaults.
+ */
+
/** One reusable token pattern the inference engine can match examples against. */
export interface TokenPattern {
  name: string;         // stable identifier, e.g. 'c_identifier'
  description: string;  // human-readable summary shown to the user
  regex: RegExp;        // anchored (^...$) whole-token matcher
  examples: string[];   // canonical examples this pattern should accept
  priority: number; // Higher priority patterns are tried first
}

/** Outcome of pattern inference over a set of user examples. */
export interface InferenceResult {
  pattern: TokenPattern | null;  // best matching pattern, or null if none fits
  confidence: number; // 0-1 score indicating match quality
  matchedExamples: string[];     // valid examples accepted by the pattern
  rejectedExamples: string[];    // invalid examples (echoed back to the caller)
}
+
/**
 * Default token pattern library with common programming language constructs.
 *
 * All regexes are anchored (^...$) because inferPattern tests each example
 * as a whole token. Candidates are tried in descending priority order, so
 * higher-priority patterns (e.g. boolean/null literals at 17) win over the
 * broader identifier patterns whose regexes would also accept them.
 */
export const DEFAULT_PATTERNS: TokenPattern[] = [
  // Identifiers
  {
    name: 'c_identifier',
    description: 'C-style identifier (letters, digits, underscore, must start with letter/underscore)',
    regex: /^[a-zA-Z_][a-zA-Z0-9_]*$/,
    examples: ['myVar', 'userName', '_private', 'MAX_SIZE'],
    priority: 10
  },
  {
    // Outranks c_identifier (11 > 10), so '$'-free names also infer as
    // js_identifier first.
    name: 'js_identifier',
    description: 'JavaScript-style identifier (letters, digits, $, _, must start with letter/$/_)',
    regex: /^[A-Za-z_$][A-Za-z0-9_$]*$/,
    examples: ['x', 'var1', '$var', '_var', 'Var3', 'BananaFruitStand'],
    priority: 11
  },
  {
    // NOTE(review): this regex requires at least two characters; a
    // single-letter name will not match the kebab pattern — confirm intended.
    name: 'kebab_identifier',
    description: 'Kebab-case identifier (letters, digits, hyphens)',
    regex: /^[a-zA-Z][a-zA-Z0-9-]*[a-zA-Z0-9]$/,
    examples: ['my-var', 'user-name', 'max-size'],
    priority: 8
  },
  {
    name: 'camel_identifier',
    description: 'CamelCase identifier (letters and digits, no separators)',
    regex: /^[a-zA-Z][a-zA-Z0-9]*$/,
    examples: ['myVar', 'userName', 'maxSize'],
    priority: 9
  },

  // Numbers — number_general (16) outranks integer/float, so mixed
  // int+float example sets resolve to the general pattern.
  {
    name: 'number_general',
    description: 'Integer or floating point number (optional sign)',
    regex: /^[+-]?(?:\d*\.\d+|\d+\.\d*|\d+)$/,
    examples: ['1', '-7', '1.24', '10000', '+0.5', '2.'],
    priority: 16
  },
  {
    name: 'integer',
    description: 'Integer number (optional sign, digits)',
    regex: /^[+-]?\d+$/,
    examples: ['42', '-17', '+123', '0'],
    priority: 15
  },
  {
    name: 'float',
    description: 'Floating point number (optional sign, decimal point)',
    regex: /^[+-]?\d*\.\d+$/,
    examples: ['3.14', '-2.5', '+0.123', '.5'],
    priority: 14
  },
  {
    name: 'scientific',
    description: 'Scientific notation number',
    regex: /^[+-]?\d*\.?\d+[eE][+-]?\d+$/,
    examples: ['1e10', '3.14e-2', '-2.5E+3'],
    priority: 12
  },
  {
    name: 'hex_number',
    description: 'Hexadecimal number (0x prefix)',
    regex: /^0[xX][0-9a-fA-F]+$/,
    examples: ['0xFF', '0x123ABC', '0X00'],
    priority: 13
  },

  // Strings — '.' does not match newlines, so these only accept
  // single-line literals; escapes inside the quotes are not modeled.
  {
    name: 'double_quoted_string',
    description: 'Double-quoted string literal',
    regex: /^".*"$/,
    examples: ['"hello"', '"world"', '""'],
    priority: 11
  },
  {
    name: 'single_quoted_string',
    description: 'Single-quoted string literal',
    regex: /^'.*'$/,
    examples: ["'hello'", "'world'", "''"],
    priority: 11
  },
  {
    name: 'backtick_string',
    description: 'Backtick-quoted string literal (template strings)',
    regex: /^`.*`$/,
    examples: ['`hello`', '`world ${var}`', '``'],
    priority: 7
  },

  // Comments — one pattern per line-comment marker style.
  {
    name: 'c_line_comment',
    description: 'C-style line comment (// prefix)',
    regex: /^\/\/.*$/,
    examples: ['// comment', '// TODO: fix this'],
    priority: 16
  },
  {
    name: 'hash_line_comment',
    description: 'Hash line comment (# prefix)',
    regex: /^#.*$/,
    examples: ['# comment', '# TODO: fix this'],
    priority: 16
  },
  {
    name: 'semicolon_line_comment',
    description: 'Semicolon line comment (; prefix)',
    regex: /^;.*$/,
    examples: ['; comment', '; TODO: fix this'],
    priority: 16
  },

  // Special patterns — highest priority so keyword literals are not
  // misclassified as identifiers.
  {
    name: 'boolean',
    description: 'Boolean literal',
    regex: /^(true|false)$/,
    examples: ['true', 'false'],
    priority: 17
  },
  {
    name: 'null_literal',
    description: 'Null/nil literal',
    regex: /^(null|nil|None|undefined)$/,
    examples: ['null', 'nil', 'None', 'undefined'],
    priority: 17
  }
];
+
+/**
+ * Infer a pattern from valid and invalid examples
+ */
+export function inferPattern(
+  validExamples: string[],
+  invalidExamples: string[] = [],
+  customPatterns: TokenPattern[] = []
+): InferenceResult {
+  if (validExamples.length === 0) {
+    return {
+      pattern: null,
+      confidence: 0,
+      matchedExamples: [],
+      rejectedExamples: invalidExamples
+    };
+  }
+
+  // Combine default patterns with custom patterns
+  const allPatterns = [...customPatterns, ...DEFAULT_PATTERNS]
+    .sort((a, b) => b.priority - a.priority);
+
+  // Try each pattern
+  for (const pattern of allPatterns) {
+    const validMatches = validExamples.filter(example => pattern.regex.test(example));
+    const invalidMatches = invalidExamples.filter(example => pattern.regex.test(example));
+
+    // Pattern must match ALL valid examples and NO invalid examples
+    if (validMatches.length === validExamples.length && invalidMatches.length === 0) {
+      const confidence = calculateConfidence(validExamples, invalidExamples, pattern);
+      
+      return {
+        pattern,
+        confidence,
+        matchedExamples: validMatches,
+        rejectedExamples: invalidExamples
+      };
+    }
+  }
+
+  // No pattern found
+  return {
+    pattern: null,
+    confidence: 0,
+    matchedExamples: [],
+    rejectedExamples: invalidExamples
+  };
+}
+
+/**
+ * Calculate confidence score for a pattern match
+ */
+function calculateConfidence(
+  validExamples: string[],
+  invalidExamples: string[],
+  pattern: TokenPattern
+): number {
+  let confidence = 0.8; // Base confidence
+
+  // Boost confidence for more valid examples
+  if (validExamples.length >= 3) confidence += 0.1;
+  if (validExamples.length >= 5) confidence += 0.05;
+
+  // Boost confidence for having invalid examples that were correctly rejected
+  if (invalidExamples.length > 0) confidence += 0.05;
+
+  // Boost confidence if examples match the pattern's own examples
+  const patternExampleMatches = validExamples.filter(ex => 
+    pattern.examples.some(pex => ex === pex)
+  );
+  if (patternExampleMatches.length > 0) {
+    confidence += 0.05 * patternExampleMatches.length;
+  }
+
+  return Math.min(confidence, 1.0);
+}
+
+/**
+ * Generate a custom regex pattern from examples (fallback when inference fails)
+ */
+export function generateCustomPattern(
+  validExamples: string[],
+  invalidExamples: string[] = []
+): string {
+  if (validExamples.length === 0) return '';
+
+  // Simple approach: create alternation of literal examples
+  // This is a fallback - not as robust as proper pattern matching
+  const escapedExamples = validExamples.map(ex => 
+    ex.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')
+  );
+
+  return `^(${escapedExamples.join('|')})$`;
+}
+
+/**
+ * Validate that a pattern works correctly with given examples
+ */
+export function validatePattern(
+  pattern: string,
+  validExamples: string[],
+  invalidExamples: string[] = []
+): { isValid: boolean; errors: string[] } {
+  const errors: string[] = [];
+  
+  try {
+    const regex = new RegExp(pattern);
+    
+    // Check valid examples
+    for (const example of validExamples) {
+      if (!regex.test(example)) {
+        errors.push(`Pattern does not match valid example: "${example}"`);
+      }
+    }
+    
+    // Check invalid examples
+    for (const example of invalidExamples) {
+      if (regex.test(example)) {
+        errors.push(`Pattern incorrectly matches invalid example: "${example}"`);
+      }
+    }
+    
+  } catch (e) {
+    errors.push(`Invalid regular expression: ${e instanceof Error ? e.message : 'Unknown error'}`);
+  }
+
+  return {
+    isValid: errors.length === 0,
+    errors
+  };
+}
diff --git a/tree-sitter/dsk/dsk-cli/src/utils/template-processor.ts b/tree-sitter/dsk/dsk-cli/src/utils/template-processor.ts
new file mode 100644
index 0000000..e237048
--- /dev/null
+++ b/tree-sitter/dsk/dsk-cli/src/utils/template-processor.ts
@@ -0,0 +1,192 @@
+/**
+ * Template Processor
+ * 
+ * Processes template files by replacing placeholders with actual values
+ */
+
+import { readFileSync, writeFileSync, readdirSync, statSync, mkdirSync, copyFileSync } from 'fs';
+import { join, dirname, basename, extname } from 'path';
+import { LanguageArchitecture, LanguageFeatures, LanguageSyntax } from '../commands/new.js';
+
/** Bundle of all collected user answers needed to instantiate a template. */
export interface TemplateContext {
  architecture: LanguageArchitecture;  // name, paradigm, purpose, data philosophy
  features: LanguageFeatures;          // control flow, data structures, function types
  syntax: LanguageSyntax;              // token patterns and example snippets
}
+
+/**
+ * Process a template directory and create a project
+ */
+export function processTemplate(
+  templateDir: string,
+  outputDir: string,
+  context: TemplateContext
+): void {
+  // Create output directory
+  mkdirSync(outputDir, { recursive: true });
+  
+  // Process all files in template directory
+  processDirectory(templateDir, outputDir, context);
+}
+
+/**
+ * Process a directory recursively
+ */
+function processDirectory(
+  sourceDir: string,
+  targetDir: string,
+  context: TemplateContext
+): void {
+  const items = readdirSync(sourceDir);
+  
+  for (const item of items) {
+    const sourcePath = join(sourceDir, item);
+    const targetPath = join(targetDir, processFileName(item, context));
+    
+    const stat = statSync(sourcePath);
+    
+    if (stat.isDirectory()) {
+      mkdirSync(targetPath, { recursive: true });
+      processDirectory(sourcePath, targetPath, context);
+    } else {
+      processFile(sourcePath, targetPath, context);
+    }
+  }
+}
+
+/**
+ * Process a single file
+ */
+function processFile(
+  sourcePath: string,
+  targetPath: string,
+  context: TemplateContext
+): void {
+  const content = readFileSync(sourcePath, 'utf-8');
+  const processedContent = processContent(content, context);
+  
+  // Ensure target directory exists
+  mkdirSync(dirname(targetPath), { recursive: true });
+  
+  writeFileSync(targetPath, processedContent, 'utf-8');
+}
+
+/**
+ * Process file name placeholders
+ */
+function processFileName(fileName: string, context: TemplateContext): string {
+  const replacements = getReplacements(context);
+  
+  let processed = fileName;
+  for (const [placeholder, value] of Object.entries(replacements)) {
+    processed = processed.replace(new RegExp(placeholder, 'g'), value);
+  }
+  
+  return processed;
+}
+
+/**
+ * Process file content placeholders
+ */
+function processContent(content: string, context: TemplateContext): string {
+  const replacements = getReplacements(context);
+  
+  let processed = content;
+  for (const [placeholder, value] of Object.entries(replacements)) {
+    processed = processed.replace(new RegExp(placeholder, 'g'), value);
+  }
+  
+  return processed;
+}
+
/**
 * Build the full placeholder -> replacement-value map used for both file
 * names and file contents.
 *
 * Note: replacements are applied one placeholder at a time in this
 * insertion order (see processContent), so a replacement value that
 * happened to contain a later placeholder string would itself be
 * substituted on a subsequent iteration.
 */
function getReplacements(context: TemplateContext): Record<string, string> {
  const { architecture, features, syntax } = context;
  
  return {
    '__DSL_NAME__': architecture.name,
    '__PARADIGM__': architecture.paradigm,
    '__PURPOSE__': architecture.purpose,
    '__DATA_PHILOSOPHY__': architecture.dataPhilosophy,
    
    // Syntax elements
    '__VARIABLE_KEYWORD__': syntax.variables.keyword,
    '__ASSIGNMENT_OPERATOR__': syntax.variables.operator,
    '__TERMINATOR__': syntax.variables.terminator,
    '__VARIABLE_EXAMPLE__': syntax.variables.example,
    
    // First user-provided example of each token kind, with fallbacks
    '__COMMENT_EXAMPLE__': syntax.comments.pattern,
    '__STRING_EXAMPLE__': syntax.strings.examples[0] || '"hello"',
    '__NUMBER_EXAMPLE__': syntax.numbers.examples[0] || '42',
    '__IDENTIFIER_EXAMPLE__': syntax.identifiers.examples[0] || 'myVar',
    
    // Paradigm-specific examples
    '__PARADIGM_EXAMPLE__': getParadigmExample(architecture, syntax),
    
    // File extension
    '__EXT__': getFileExtension(architecture.name),
    
    // Feature lists
    '__FEATURES_LIST__': generateFeaturesList(features),
    '__DATA_STRUCTURES__': features.dataStructures.join(', ') || 'Basic types',
    
    // Control flow
    '__CONTROL_FLOW__': features.controlFlow.join(', ') || 'Sequential execution'
  };
}
+
+/**
+ * Get paradigm-specific example code
+ */
+function getParadigmExample(architecture: LanguageArchitecture, syntax: LanguageSyntax): string {
+  if (syntax.paradigmExamples.class) {
+    return syntax.paradigmExamples.class;
+  }
+  if (syntax.paradigmExamples.function) {
+    return syntax.paradigmExamples.function;
+  }
+  if (syntax.paradigmExamples.rule) {
+    return syntax.paradigmExamples.rule;
+  }
+  
+  // Fallback to variable example
+  return syntax.variables.example;
+}
+
+/**
+ * Get appropriate file extension for the language
+ */
+function getFileExtension(languageName: string): string {
+  // Simple heuristic - could be made configurable
+  const name = languageName.toLowerCase();
+  
+  if (name.includes('script')) return 'script';
+  if (name.includes('lang')) return 'lang';
+  if (name.includes('dsl')) return 'dsl';
+  
+  // Default: use first 3 characters of name
+  return name.substring(0, 3);
+}
+
+/**
+ * Generate features list for README
+ */
+function generateFeaturesList(features: LanguageFeatures): string {
+  const items: string[] = [];
+  
+  if (features.controlFlow.length > 0) {
+    items.push(`- **Control Flow**: ${features.controlFlow.join(', ')}`);
+  }
+  
+  if (features.dataStructures.length > 0) {
+    items.push(`- **Data Structures**: ${features.dataStructures.join(', ')}`);
+  }
+  
+  if (features.functionTypes.length > 0) {
+    items.push(`- **Functions**: ${features.functionTypes.join(', ')}`);
+  }
+  
+  return items.length > 0 ? items.join('\n') : '- Basic language constructs';
+}
diff --git a/tree-sitter/dsk/dsk-cli/templates/default/README.md b/tree-sitter/dsk/dsk-cli/templates/default/README.md
new file mode 100644
index 0000000..4560c6a
--- /dev/null
+++ b/tree-sitter/dsk/dsk-cli/templates/default/README.md
@@ -0,0 +1,82 @@
+# __DSL_NAME__
+
+A Domain-Specific Language created with DSK (DSL Development Kit).
+
+## Overview
+
+- **Language Type**: __PARADIGM__
+- **Purpose**: __PURPOSE__
+- **Data Philosophy**: __DATA_PHILOSOPHY__
+
+## Getting Started
+
+### Prerequisites
+
+- [Tree-sitter CLI](https://tree-sitter.github.io/tree-sitter/creating-parsers#installation)
+- Node.js or Bun (for JavaScript bindings)
+- C compiler (gcc/clang) for native compilation
+
+### Development
+
+```bash
+# Start development mode with file watching
+dsk dev
+
+# Run tests
+dsk test
+
+# Build parser and packages
+dsk build
+
+# Generate editor syntax highlighting
+dsk highlight
+
+# Package for distribution
+dsk package
+```
+
+## Project Structure
+
+```
+__DSL_NAME__/
+├── grammar.js          # Tree-sitter grammar definition
+├── corpus/             # Test cases for the grammar
+│   └── examples.txt
+├── generated/          # Generated artifacts (created by dsk build)
+│   ├── c/             # C static library
+│   ├── js/            # JavaScript/Node.js package
+│   └── editors/       # Editor configurations
+└── examples/          # Example programs in your language
+    └── hello.__EXT__
+```
+
+## Language Features
+
+__FEATURES_LIST__
+
+## Grammar Rules
+
+The grammar includes rules for:
+
+- **Tokens**: Identifiers, numbers, strings, comments
+- **Expressions**: Based on your language paradigm
+- **Statements**: Control flow and declarations
+- **Data Structures**: __DATA_STRUCTURES__
+
+## Next Steps
+
+1. **Customize the Grammar**: Edit `grammar.js` to refine your language syntax
+2. **Add Test Cases**: Create examples in `corpus/examples.txt`
+3. **Write Example Programs**: Add sample code to `examples/`
+4. **Generate Editor Support**: Run `dsk highlight` for syntax highlighting
+5. **Build and Test**: Use `dsk dev` for iterative development
+
+## Resources
+
+- [Tree-sitter Documentation](https://tree-sitter.github.io/tree-sitter/)
+- [DSK Documentation](https://github.com/your-org/dsk)
+- [Language Implementation Patterns](https://pragprog.com/titles/tpdsl/)
+
+---
+
+Generated by [DSK](https://github.com/your-org/dsk) - DSL Development Kit
diff --git a/tree-sitter/dsk/dsk-cli/templates/default/corpus/examples.txt b/tree-sitter/dsk/dsk-cli/templates/default/corpus/examples.txt
new file mode 100644
index 0000000..94f199e
--- /dev/null
+++ b/tree-sitter/dsk/dsk-cli/templates/default/corpus/examples.txt
@@ -0,0 +1,85 @@
+================================================================================
+Basic Examples
+================================================================================
+
+Simple variable declaration
+---
+
+__VARIABLE_EXAMPLE__
+
+---
+
+(source_file
+  (statement
+    (variable_declaration
+      (identifier)
+      (expression (number)))))
+
+================================================================================
+Comments
+================================================================================
+
+Line comment
+---
+
+__COMMENT_EXAMPLE__ This is a comment
+__VARIABLE_EXAMPLE__
+
+---
+
+(source_file
+  (statement
+    (variable_declaration
+      (identifier)
+      (expression (number)))))
+
+================================================================================
+Expressions
+================================================================================
+
+String literal via variable declaration
+---
+
+__VARIABLE_KEYWORD__ message __ASSIGNMENT_OPERATOR__ __STRING_EXAMPLE____TERMINATOR__
+
+---
+
+(source_file
+  (statement
+    (variable_declaration
+      (identifier)
+      (expression (string)))))
+
+================================================================================
+Numbers
+================================================================================
+
+Integer via variable declaration
+---
+
+__VARIABLE_KEYWORD__ count __ASSIGNMENT_OPERATOR__ __NUMBER_EXAMPLE____TERMINATOR__
+
+---
+
+(source_file
+  (statement
+    (variable_declaration
+      (identifier)
+      (expression (number)))))
+
+================================================================================
+Identifiers
+================================================================================
+
+Valid identifier via variable declaration
+---
+
+__VARIABLE_KEYWORD__ __IDENTIFIER_EXAMPLE__ __ASSIGNMENT_OPERATOR__ 42__TERMINATOR__
+
+---
+
+(source_file
+  (statement
+    (variable_declaration
+      (identifier)
+      (expression (number)))))
diff --git a/tree-sitter/dsk/dsk-cli/templates/default/examples/hello.__EXT__ b/tree-sitter/dsk/dsk-cli/templates/default/examples/hello.__EXT__
new file mode 100644
index 0000000..07769f6
--- /dev/null
+++ b/tree-sitter/dsk/dsk-cli/templates/default/examples/hello.__EXT__
@@ -0,0 +1,7 @@
+__COMMENT_EXAMPLE__ Hello World example for __DSL_NAME__
+
+__PARADIGM_EXAMPLE__
+
+__COMMENT_EXAMPLE__ More examples:
+__VARIABLE_EXAMPLE__
+__VARIABLE_KEYWORD__ __IDENTIFIER_EXAMPLE__ __ASSIGNMENT_OPERATOR__ __STRING_EXAMPLE____TERMINATOR__
diff --git a/tree-sitter/dsk/dsk-cli/templates/default/package.json b/tree-sitter/dsk/dsk-cli/templates/default/package.json
new file mode 100644
index 0000000..ab3d0c4
--- /dev/null
+++ b/tree-sitter/dsk/dsk-cli/templates/default/package.json
@@ -0,0 +1,9 @@
+{
+  "name": "__DSL_NAME__-dsl",
+  "private": true,
+  "type": "commonjs",
+  "description": "DSL project for __DSL_NAME__ generated by DSK",
+  "license": "MIT"
+}
+
+
diff --git a/tree-sitter/dsk/dsk-cli/templates/js-addon/binding.gyp b/tree-sitter/dsk/dsk-cli/templates/js-addon/binding.gyp
new file mode 100644
index 0000000..ddb424b
--- /dev/null
+++ b/tree-sitter/dsk/dsk-cli/templates/js-addon/binding.gyp
@@ -0,0 +1,28 @@
+{
+  "targets": [
+    {
+      "target_name": "tree_sitter___DSL_NAME___binding",
+      "dependencies": [
+        "<!(node -p \"require('node-addon-api').gyp\")"
+      ],
+      "include_dirs": [
+        "<!@(node -p \"require('node-addon-api').include\")",
+        "src"
+      ],
+      "sources": [
+        "bindings/node.cc",
+        "src/parser.c"
+      ],
+      "cflags_c": [
+        "-std=c99"
+      ],
+      "cflags_cc": [
+        "-std=c++14"
+      ],
+      "defines": [
+        "NAPI_DISABLE_CPP_EXCEPTIONS",
+        "NAPI_VERSION=8"
+      ]
+    }
+  ]
+}
diff --git a/tree-sitter/dsk/dsk-cli/templates/js-addon/bindings/node.cc b/tree-sitter/dsk/dsk-cli/templates/js-addon/bindings/node.cc
new file mode 100644
index 0000000..d0ba098
--- /dev/null
+++ b/tree-sitter/dsk/dsk-cli/templates/js-addon/bindings/node.cc
@@ -0,0 +1,16 @@
+#include "napi.h"
+
+typedef struct TSLanguage TSLanguage;
+
+extern "C" TSLanguage *tree_sitter___DSL_NAME__();
+
+// "tree_sitter___DSL_NAME___binding" is the symbol that gets exported
+// when this file is compiled as a Node.js addon.
+Napi::Object Init(Napi::Env env, Napi::Object exports) {
+  exports["name"] = Napi::String::New(env, "__DSL_NAME__");
+  auto language = tree_sitter___DSL_NAME__();
+  exports["language"] = Napi::External<TSLanguage>::New(env, language);
+  return exports;
+}
+
+NODE_API_MODULE(tree_sitter___DSL_NAME___binding, Init)
diff --git a/tree-sitter/dsk/dsk-cli/templates/js-addon/index.d.ts b/tree-sitter/dsk/dsk-cli/templates/js-addon/index.d.ts
new file mode 100644
index 0000000..d25eae0
--- /dev/null
+++ b/tree-sitter/dsk/dsk-cli/templates/js-addon/index.d.ts
@@ -0,0 +1,3 @@
+declare const _exports: any;
+export = _exports;
+//# sourceMappingURL=index.d.ts.map
\ No newline at end of file
diff --git a/tree-sitter/dsk/dsk-cli/templates/js-addon/index.d.ts.map b/tree-sitter/dsk/dsk-cli/templates/js-addon/index.d.ts.map
new file mode 100644
index 0000000..ca7a93a
--- /dev/null
+++ b/tree-sitter/dsk/dsk-cli/templates/js-addon/index.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["index.js"],"names":[],"mappings":""}
\ No newline at end of file
diff --git a/tree-sitter/dsk/dsk-cli/templates/js-addon/index.js b/tree-sitter/dsk/dsk-cli/templates/js-addon/index.js
new file mode 100644
index 0000000..4780f31
--- /dev/null
+++ b/tree-sitter/dsk/dsk-cli/templates/js-addon/index.js
@@ -0,0 +1,15 @@
+try {
+  module.exports = require("./build/Release/tree_sitter___DSL_NAME___binding");
+} catch (error1) {
+  if (error1.code !== 'MODULE_NOT_FOUND') {
+    throw error1;
+  }
+  try {
+    module.exports = require("./build/Debug/tree_sitter___DSL_NAME___binding");
+  } catch (error2) {
+    if (error2.code !== 'MODULE_NOT_FOUND') {
+      throw error2;
+    }
+    throw error1
+  }
+}
diff --git a/tree-sitter/dsk/dsk-cli/templates/js-addon/package.json b/tree-sitter/dsk/dsk-cli/templates/js-addon/package.json
new file mode 100644
index 0000000..c7a0488
--- /dev/null
+++ b/tree-sitter/dsk/dsk-cli/templates/js-addon/package.json
@@ -0,0 +1,31 @@
+{
+  "name": "tree-sitter-__DSL_NAME__",
+  "version": "1.0.0",
+  "description": "Tree-sitter parser for __DSL_NAME__",
+  "main": "index.js",
+  "keywords": [
+    "tree-sitter",
+    "parser",
+    "__DSL_NAME__"
+  ],
+  "author": "Generated by DSK",
+  "license": "MIT",
+  "dependencies": {
+    "node-addon-api": "^7.0.0",
+    "node-gyp": "^10.0.0"
+  },
+  "devDependencies": {
+    "tree-sitter-cli": "^0.20.0"
+  },
+  "scripts": {
+    "install": "node-gyp rebuild",
+    "test": "tree-sitter test"
+  },
+  "gypfile": true,
+  "files": [
+    "grammar.js",
+    "src",
+    "index.js",
+    "binding.gyp"
+  ]
+}
diff --git a/tree-sitter/dsk/dsk-cli/test-inference.d.ts b/tree-sitter/dsk/dsk-cli/test-inference.d.ts
new file mode 100644
index 0000000..53c18d9
--- /dev/null
+++ b/tree-sitter/dsk/dsk-cli/test-inference.d.ts
@@ -0,0 +1,5 @@
+/**
+ * Quick test of the inference engine
+ */
+export {};
+//# sourceMappingURL=test-inference.d.ts.map
\ No newline at end of file
diff --git a/tree-sitter/dsk/dsk-cli/test-inference.d.ts.map b/tree-sitter/dsk/dsk-cli/test-inference.d.ts.map
new file mode 100644
index 0000000..ecf80f8
--- /dev/null
+++ b/tree-sitter/dsk/dsk-cli/test-inference.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"test-inference.d.ts","sourceRoot":"","sources":["test-inference.ts"],"names":[],"mappings":"AAAA;;GAEG"}
\ No newline at end of file
diff --git a/tree-sitter/dsk/dsk-cli/test-inference.js.map b/tree-sitter/dsk/dsk-cli/test-inference.js.map
new file mode 100644
index 0000000..5d9b120
--- /dev/null
+++ b/tree-sitter/dsk/dsk-cli/test-inference.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"test-inference.js","sourceRoot":"","sources":["test-inference.ts"],"names":[],"mappings":"AAAA;;GAEG;AAEH,OAAO,EAAE,YAAY,EAAE,MAAM,0BAA0B,CAAC;AAExD,OAAO,CAAC,GAAG,CAAC,uCAAuC,CAAC,CAAC;AAErD,8BAA8B;AAC9B,OAAO,CAAC,GAAG,CAAC,6BAA6B,CAAC,CAAC;AAC3C,MAAM,OAAO,GAAG,YAAY,CAAC,CAAC,OAAO,EAAE,UAAU,EAAE,UAAU,CAAC,EAAE,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAC;AACpF,OAAO,CAAC,GAAG,CAAC,gBAAgB,EAAE,OAAO,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;AACrD,OAAO,CAAC,GAAG,CAAC,QAAQ,EAAE,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,MAAM,CAAC,CAAC;AACrD,OAAO,CAAC,GAAG,CAAC,aAAa,EAAE,OAAO,CAAC,UAAU,CAAC,CAAC;AAC/C,OAAO,CAAC,GAAG,EAAE,CAAC;AAEd,kBAAkB;AAClB,OAAO,CAAC,GAAG,CAAC,yBAAyB,CAAC,CAAC;AACvC,MAAM,OAAO,GAAG,YAAY,CAAC,CAAC,IAAI,EAAE,KAAK,EAAE,KAAK,CAAC,EAAE,CAAC,MAAM,EAAE,KAAK,CAAC,CAAC,CAAC;AACpE,OAAO,CAAC,GAAG,CAAC,gBAAgB,EAAE,OAAO,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;AACrD,OAAO,CAAC,GAAG,CAAC,QAAQ,EAAE,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,MAAM,CAAC,CAAC;AACrD,OAAO,CAAC,GAAG,CAAC,aAAa,EAAE,OAAO,CAAC,UAAU,CAAC,CAAC;AAC/C,OAAO,CAAC,GAAG,EAAE,CAAC;AAEd,kBAAkB;AAClB,OAAO,CAAC,GAAG,CAAC,+BAA+B,CAAC,CAAC;AAC7C,MAAM,OAAO,GAAG,YAAY,CAAC,CAAC,SAAS,EAAE,SAAS,EAAE,IAAI,CAAC,EAAE,CAAC,UAAU,EAAE,MAAM,CAAC,CAAC,CAAC;AACjF,OAAO,CAAC,GAAG,CAAC,gBAAgB,EAAE,OAAO,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;AACrD,OAAO,CAAC,GAAG,CAAC,QAAQ,EAAE,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,MAAM,CAAC,CAAC;AACrD,OAAO,CAAC,GAAG,CAAC,aAAa,EAAE,OAAO,CAAC,UAAU,CAAC,CAAC;AAC/C,OAAO,CAAC,GAAG,EAAE,CAAC;AAEd,mBAAmB;AACnB,OAAO,CAAC,GAAG,CAAC,sCAAsC,CAAC,CAAC;AACpD,MAAM,OAAO,GAAG,YAAY,CAAC,CAAC,SAAS,EAAE,QAAQ,CAAC,EAAE,EAAE,CAAC,CAAC;AACxD,OAAO,CAAC,GAAG,CAAC,gBAAgB,EAAE,OAAO,CAAC,OAAO,EAAE,IAAI,IAAI,MAAM,CAAC,CAAC;AAC/D,OAAO,CAAC,GAAG,CAAC,aAAa,EAAE,OAAO,CAAC,UAAU,CAAC,CAAC;AAC/C,OAAO,CAAC,GAAG,EAAE,CAAC;AAEd,OAAO,CAAC,GAAG,CAAC,mCAAmC,CAAC,CAAC"}
\ No newline at end of file
diff --git a/tree-sitter/dsk/dsk-cli/tsconfig.json b/tree-sitter/dsk/dsk-cli/tsconfig.json
new file mode 100644
index 0000000..e1599f7
--- /dev/null
+++ b/tree-sitter/dsk/dsk-cli/tsconfig.json
@@ -0,0 +1,37 @@
+{
+  "compilerOptions": {
+    // Enable latest features
+    "lib": ["ESNext", "DOM"],
+    "target": "ESNext",
+    "module": "ESNext",
+    "moduleDetection": "force",
+    "allowJs": true,
+
+    // Output configuration for CLI tool
+    "moduleResolution": "node",
+    "types": ["node"],
+    "outDir": "./dist",
+    "rootDir": "./src",
+    "declaration": true,
+    "declarationMap": true,
+    "sourceMap": true,
+
+    // Best practices
+    "strict": true,
+    "skipLibCheck": true,
+    "noFallthroughCasesInSwitch": true,
+
+    // Some stricter flags (disabled by default)
+    "noUnusedLocals": false,
+    "noUnusedParameters": false,
+    "noPropertyAccessFromIndexSignature": false
+  },
+  "include": [
+    "src/**/*"
+  ],
+  "exclude": [
+    "templates/**/*",
+    "dist/**/*",
+    "node_modules"
+  ]
+}
diff --git a/tree-sitter/dsk/test-build/generated/c/include/test_lang.h b/tree-sitter/dsk/test-build/generated/c/include/test_lang.h
new file mode 100644
index 0000000..9fcc831
--- /dev/null
+++ b/tree-sitter/dsk/test-build/generated/c/include/test_lang.h
@@ -0,0 +1,16 @@
+#ifndef TREE_SITTER_TEST_LANG_H_
+#define TREE_SITTER_TEST_LANG_H_
+
+typedef struct TSLanguage TSLanguage;
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+const TSLanguage *tree_sitter_test_lang(void);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif // TREE_SITTER_TEST_LANG_H_
diff --git a/tree-sitter/dsk/test-build/generated/c/lib/libtest_lang.a b/tree-sitter/dsk/test-build/generated/c/lib/libtest_lang.a
new file mode 100644
index 0000000..d0be9b3
--- /dev/null
+++ b/tree-sitter/dsk/test-build/generated/c/lib/libtest_lang.a
Binary files differdiff --git a/tree-sitter/dsk/test-build/generated/js/binding.gyp b/tree-sitter/dsk/test-build/generated/js/binding.gyp
new file mode 100644
index 0000000..2ed790a
--- /dev/null
+++ b/tree-sitter/dsk/test-build/generated/js/binding.gyp
@@ -0,0 +1,27 @@
+{
+  "targets": [
+    {
+      "target_name": "tree_sitter_test_lang_binding",
+      "dependencies": [
+        "<!(node -p \"require('node-addon-api').gyp\")"
+      ],
+      "include_dirs": [
+        "<!(node -p \"require('node-addon-api').include\")",
+        "src"
+      ],
+      "sources": [
+        "bindings/node.cc",
+        "src/parser.c"
+      ],
+      "cflags_c": [
+        "-std=c99"
+      ],
+      "cflags_cc": [
+        "-std=c++14"
+      ],
+      "defines": [
+        "NAPI_DISABLE_CPP_EXCEPTIONS"
+      ]
+    }
+  ]
+}
diff --git a/tree-sitter/dsk/test-build/generated/js/bindings/node.cc b/tree-sitter/dsk/test-build/generated/js/bindings/node.cc
new file mode 100644
index 0000000..abea050
--- /dev/null
+++ b/tree-sitter/dsk/test-build/generated/js/bindings/node.cc
@@ -0,0 +1,16 @@
+#include "napi.h"
+
+typedef struct TSLanguage TSLanguage;
+
+extern "C" TSLanguage *tree_sitter_test_lang();
+
+// "tree_sitter_test_lang_binding" is the symbol that gets exported
+// when this file is compiled as a Node.js addon.
+Napi::Object Init(Napi::Env env, Napi::Object exports) {
+  exports["name"] = Napi::String::New(env, "test_lang");
+  auto language = tree_sitter_test_lang();
+  exports["language"] = Napi::External<TSLanguage>::New(env, language);
+  return exports;
+}
+
+NODE_API_MODULE(tree_sitter_test_lang_binding, Init)
diff --git a/tree-sitter/dsk/test-build/generated/js/build/Makefile b/tree-sitter/dsk/test-build/generated/js/build/Makefile
new file mode 100644
index 0000000..9028cf0
--- /dev/null
+++ b/tree-sitter/dsk/test-build/generated/js/build/Makefile
@@ -0,0 +1,352 @@
+# We borrow heavily from the kernel build setup, though we are simpler since
+# we don't have Kconfig tweaking settings on us.
+
+# The implicit make rules have it looking for RCS files, among other things.
+# We instead explicitly write all the rules we care about.
+# It's even quicker (saves ~200ms) to pass -r on the command line.
+MAKEFLAGS=-r
+
+# The source directory tree.
+srcdir := ..
+abs_srcdir := $(abspath $(srcdir))
+
+# The name of the builddir.
+builddir_name ?= .
+
+# The V=1 flag on command line makes us verbosely print command lines.
+ifdef V
+  quiet=
+else
+  quiet=quiet_
+endif
+
+# Specify BUILDTYPE=Release on the command line for a release build.
+BUILDTYPE ?= Release
+
+# Directory all our build output goes into.
+# Note that this must be two directories beneath src/ for unit tests to pass,
+# as they reach into the src/ directory for data with relative paths.
+builddir ?= $(builddir_name)/$(BUILDTYPE)
+abs_builddir := $(abspath $(builddir))
+depsdir := $(builddir)/.deps
+
+# Object output directory.
+obj := $(builddir)/obj
+abs_obj := $(abspath $(obj))
+
+# We build up a list of every single one of the targets so we can slurp in the
+# generated dependency rule Makefiles in one pass.
+all_deps :=
+
+
+
+CC.target ?= $(CC)
+CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS)
+CXX.target ?= $(CXX)
+CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS)
+LINK.target ?= $(LINK)
+LDFLAGS.target ?= $(LDFLAGS)
+AR.target ?= $(AR)
+PLI.target ?= pli
+
+# C++ apps need to be linked with g++.
+LINK ?= $(CXX.target)
+
+# TODO(evan): move all cross-compilation logic to gyp-time so we don't need
+# to replicate this environment fallback in make as well.
+CC.host ?= gcc
+CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host)
+CXX.host ?= g++
+CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host)
+LINK.host ?= $(CXX.host)
+LDFLAGS.host ?= $(LDFLAGS_host)
+AR.host ?= ar
+PLI.host ?= pli
+
+# Define a dir function that can handle spaces.
+# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions
+# "leading spaces cannot appear in the text of the first argument as written.
+# These characters can be put into the argument value by variable substitution."
+empty :=
+space := $(empty) $(empty)
+
+# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces
+replace_spaces = $(subst $(space),?,$1)
+unreplace_spaces = $(subst ?,$(space),$1)
+dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1)))
+
+# Flags to make gcc output dependency info.  Note that you need to be
+# careful here to use the flags that ccache and distcc can understand.
+# We write to a dep file on the side first and then rename at the end
+# so we can't end up with a broken dep file.
+depfile = $(depsdir)/$(call replace_spaces,$@).d
+DEPFLAGS = -MMD -MF $(depfile).raw
+
+# We have to fixup the deps output in a few ways.
+# (1) the file output should mention the proper .o file.
+# ccache or distcc lose the path to the target, so we convert a rule of
+# the form:
+#   foobar.o: DEP1 DEP2
+# into
+#   path/to/foobar.o: DEP1 DEP2
+# (2) we want missing files not to cause us to fail to build.
+# We want to rewrite
+#   foobar.o: DEP1 DEP2 \
+#               DEP3
+# to
+#   DEP1:
+#   DEP2:
+#   DEP3:
+# so if the files are missing, they're just considered phony rules.
+# We have to do some pretty insane escaping to get those backslashes
+# and dollar signs past make, the shell, and sed at the same time.
+# Doesn't work with spaces, but that's fine: .d files have spaces in
+# their names replaced with other characters.
+define fixup_dep
+# The depfile may not exist if the input file didn't have any #includes.
+touch $(depfile).raw
+# Fixup path as in (1).
+sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile)
+# Add extra rules as in (2).
+# We remove slashes and replace spaces with new lines;
+# remove blank lines;
+# delete the first line and append a colon to the remaining lines.
+sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\
+  grep -v '^$$'                             |\
+  sed -e 1d -e 's|$$|:|'                     \
+    >> $(depfile)
+rm $(depfile).raw
+endef
+
+# Command definitions:
+# - cmd_foo is the actual command to run;
+# - quiet_cmd_foo is the brief-output summary of the command.
+
+quiet_cmd_cc = CC($(TOOLSET)) $@
+cmd_cc = $(CC.$(TOOLSET)) -o $@ $< $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c
+
+quiet_cmd_cxx = CXX($(TOOLSET)) $@
+cmd_cxx = $(CXX.$(TOOLSET)) -o $@ $< $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c
+
+quiet_cmd_objc = CXX($(TOOLSET)) $@
+cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
+
+quiet_cmd_objcxx = CXX($(TOOLSET)) $@
+cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
+
+# Commands for precompiled header files.
+quiet_cmd_pch_c = CXX($(TOOLSET)) $@
+cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
+quiet_cmd_pch_cc = CXX($(TOOLSET)) $@
+cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
+quiet_cmd_pch_m = CXX($(TOOLSET)) $@
+cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
+quiet_cmd_pch_mm = CXX($(TOOLSET)) $@
+cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
+
+# gyp-mac-tool is written next to the root Makefile by gyp.
+# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd
+# already.
+quiet_cmd_mac_tool = MACTOOL $(4) $<
+cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@"
+
+quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@
+cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4)
+
+quiet_cmd_infoplist = INFOPLIST $@
+cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@"
+
+quiet_cmd_touch = TOUCH $@
+cmd_touch = touch $@
+
+quiet_cmd_copy = COPY $@
+# send stderr to /dev/null to ignore messages when linking directories.
+cmd_copy = ln -f "$<" "$@" 2>/dev/null || (rm -rf "$@" && cp -af "$<" "$@")
+
+quiet_cmd_symlink = SYMLINK $@
+cmd_symlink = ln -sf "$<" "$@"
+
+quiet_cmd_alink = LIBTOOL-STATIC $@
+cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^)
+
+quiet_cmd_link = LINK($(TOOLSET)) $@
+cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
+
+quiet_cmd_solink = SOLINK($(TOOLSET)) $@
+cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
+
+quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
+cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
+
+
+# Define an escape_quotes function to escape single quotes.
+# This allows us to handle quotes properly as long as we always use
+# use single quotes and escape_quotes.
+escape_quotes = $(subst ','\'',$(1))
+# This comment is here just to include a ' to unconfuse syntax highlighting.
+# Define an escape_vars function to escape '$' variable syntax.
+# This allows us to read/write command lines with shell variables (e.g.
+# $LD_LIBRARY_PATH), without triggering make substitution.
+escape_vars = $(subst $$,$$$$,$(1))
+# Helper that expands to a shell command to echo a string exactly as it is in
+# make. This uses printf instead of echo because printf's behaviour with respect
+# to escape sequences is more portable than echo's across different shells
+# (e.g., dash, bash).
+exact_echo = printf '%s\n' '$(call escape_quotes,$(1))'
+
+# Helper to compare the command we're about to run against the command
+# we logged the last time we ran the command.  Produces an empty
+# string (false) when the commands match.
+# Tricky point: Make has no string-equality test function.
+# The kernel uses the following, but it seems like it would have false
+# positives, where one string reordered its arguments.
+#   arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \
+#                       $(filter-out $(cmd_$@), $(cmd_$(1))))
+# We instead substitute each for the empty string into the other, and
+# say they're equal if both substitutions produce the empty string.
+# .d files contain ? instead of spaces, take that into account.
+command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\
+                       $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1))))
+
+# Helper that is non-empty when a prerequisite changes.
+# Normally make does this implicitly, but we force rules to always run
+# so we can check their command lines.
+#   $? -- new prerequisites
+#   $| -- order-only dependencies
+prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?))
+
+# Helper that executes all postbuilds until one fails.
+define do_postbuilds
+  @E=0;\
+  for p in $(POSTBUILDS); do\
+    eval $$p;\
+    E=$$?;\
+    if [ $$E -ne 0 ]; then\
+      break;\
+    fi;\
+  done;\
+  if [ $$E -ne 0 ]; then\
+    rm -rf "$@";\
+    exit $$E;\
+  fi
+endef
+
+# do_cmd: run a command via the above cmd_foo names, if necessary.
+# Should always run for a given target to handle command-line changes.
+# Second argument, if non-zero, makes it do asm/C/C++ dependency munging.
+# Third argument, if non-zero, makes it do POSTBUILDS processing.
+# Note: We intentionally do NOT call dirx for depfile, since it contains ? for
+# spaces already and dirx strips the ? characters.
+define do_cmd
+$(if $(or $(command_changed),$(prereq_changed)),
+  @$(call exact_echo,  $($(quiet)cmd_$(1)))
+  @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))"
+  $(if $(findstring flock,$(word 2,$(cmd_$1))),
+    @$(cmd_$(1))
+    @echo "  $(quiet_cmd_$(1)): Finished",
+    @$(cmd_$(1))
+  )
+  @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile)
+  @$(if $(2),$(fixup_dep))
+  $(if $(and $(3), $(POSTBUILDS)),
+    $(call do_postbuilds)
+  )
+)
+endef
+
+# Declare the "all" target first so it is the default,
+# even though we don't have the deps yet.
+.PHONY: all
+all:
+
+# make looks for ways to re-generate included makefiles, but in our case, we
+# don't have a direct way. Explicitly telling make that it has nothing to do
+# for them makes it go faster.
+%.d: ;
+
+# Use FORCE_DO_CMD to force a target to run.  Should be coupled with
+# do_cmd.
+.PHONY: FORCE_DO_CMD
+FORCE_DO_CMD:
+
+TOOLSET := target
+# Suffix rules, putting all outputs into $(obj).
+$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD
+	@$(call do_cmd,cc,1)
+$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD
+	@$(call do_cmd,cxx,1)
+$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD
+	@$(call do_cmd,cxx,1)
+$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD
+	@$(call do_cmd,cxx,1)
+$(obj).$(TOOLSET)/%.o: $(srcdir)/%.m FORCE_DO_CMD
+	@$(call do_cmd,objc,1)
+$(obj).$(TOOLSET)/%.o: $(srcdir)/%.mm FORCE_DO_CMD
+	@$(call do_cmd,objcxx,1)
+$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD
+	@$(call do_cmd,cc,1)
+$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD
+	@$(call do_cmd,cc,1)
+
+# Try building from generated source, too.
+$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD
+	@$(call do_cmd,cc,1)
+$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD
+	@$(call do_cmd,cxx,1)
+$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD
+	@$(call do_cmd,cxx,1)
+$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD
+	@$(call do_cmd,cxx,1)
+$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.m FORCE_DO_CMD
+	@$(call do_cmd,objc,1)
+$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.mm FORCE_DO_CMD
+	@$(call do_cmd,objcxx,1)
+$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD
+	@$(call do_cmd,cc,1)
+$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD
+	@$(call do_cmd,cc,1)
+
+$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD
+	@$(call do_cmd,cc,1)
+$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD
+	@$(call do_cmd,cxx,1)
+$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD
+	@$(call do_cmd,cxx,1)
+$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD
+	@$(call do_cmd,cxx,1)
+$(obj).$(TOOLSET)/%.o: $(obj)/%.m FORCE_DO_CMD
+	@$(call do_cmd,objc,1)
+$(obj).$(TOOLSET)/%.o: $(obj)/%.mm FORCE_DO_CMD
+	@$(call do_cmd,objcxx,1)
+$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD
+	@$(call do_cmd,cc,1)
+$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD
+	@$(call do_cmd,cc,1)
+
+
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,node_modules/node-addon-api/nothing.target.mk)))),)
+  include node_modules/node-addon-api/nothing.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,tree_sitter_test_lang_binding.target.mk)))),)
+  include tree_sitter_test_lang_binding.target.mk
+endif
+
+quiet_cmd_regen_makefile = ACTION Regenerating $@
+cmd_regen_makefile = cd $(srcdir); /Users/eli/.bun/install/global/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/Users/eli/Library/Caches/node-gyp/20.13.1" "-Dnode_gyp_dir=/Users/eli/.bun/install/global/node_modules/node-gyp" "-Dnode_lib_file=/Users/eli/Library/Caches/node-gyp/20.13.1/<(target_arch)/node.lib" "-Dmodule_root_dir=/Users/eli/Code/institute/tour/tree-sitter/dsk/test-build/generated/js" "-Dnode_engine=v8" "--depth=." "-Goutput_dir=." "--generator-output=build" -I/Users/eli/Code/institute/tour/tree-sitter/dsk/test-build/generated/js/build/config.gypi -I/Users/eli/.bun/install/global/node_modules/node-gyp/addon.gypi -I/Users/eli/Library/Caches/node-gyp/20.13.1/include/node/common.gypi "--toplevel-dir=." binding.gyp
+Makefile: $(srcdir)/build/config.gypi $(srcdir)/../../../../../../../../Library/Caches/node-gyp/20.13.1/include/node/common.gypi $(srcdir)/../../../../../../../../.bun/install/global/node_modules/node-gyp/addon.gypi $(srcdir)/binding.gyp $(srcdir)/node_modules/node-addon-api/node_api.gyp
+	$(call do_cmd,regen_makefile)
+
+# "all" is a concatenation of the "all" targets from all the included
+# sub-makefiles. This is just here to clarify.
+all:
+
+# Add in dependency-tracking rules.  $(all_deps) is the list of every single
+# target in our tree. Only consider the ones with .d (dependency) info:
+d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d))
+ifneq ($(d_files),)
+  include $(d_files)
+endif
diff --git a/tree-sitter/dsk/test-build/generated/js/build/Release/.deps/Release/nothing.a.d b/tree-sitter/dsk/test-build/generated/js/build/Release/.deps/Release/nothing.a.d
new file mode 100644
index 0000000..8197f15
--- /dev/null
+++ b/tree-sitter/dsk/test-build/generated/js/build/Release/.deps/Release/nothing.a.d
@@ -0,0 +1 @@
+cmd_Release/nothing.a := rm -f Release/nothing.a && ./gyp-mac-tool filter-libtool libtool  -static -o Release/nothing.a Release/obj.target/nothing/node_modules/node-addon-api/nothing.o
diff --git a/tree-sitter/dsk/test-build/generated/js/build/Release/nothing.a b/tree-sitter/dsk/test-build/generated/js/build/Release/nothing.a
new file mode 100644
index 0000000..c285b84
--- /dev/null
+++ b/tree-sitter/dsk/test-build/generated/js/build/Release/nothing.a
Binary files differdiff --git a/tree-sitter/dsk/test-build/generated/js/build/binding.Makefile b/tree-sitter/dsk/test-build/generated/js/build/binding.Makefile
new file mode 100644
index 0000000..fb41093
--- /dev/null
+++ b/tree-sitter/dsk/test-build/generated/js/build/binding.Makefile
@@ -0,0 +1,6 @@
+# This file is generated by gyp; do not edit.
+
+export builddir_name ?= ./build/.
+.PHONY: all
+all:
+	$(MAKE) tree_sitter_test_lang_binding
diff --git a/tree-sitter/dsk/test-build/generated/js/build/config.gypi b/tree-sitter/dsk/test-build/generated/js/build/config.gypi
new file mode 100644
index 0000000..a41d769
--- /dev/null
+++ b/tree-sitter/dsk/test-build/generated/js/build/config.gypi
@@ -0,0 +1,424 @@
+# Do not edit. File was generated by node-gyp's "configure" step
+{
+  "target_defaults": {
+    "cflags": [],
+    "default_configuration": "Release",
+    "defines": [],
+    "include_dirs": [],
+    "libraries": [],
+    "msvs_configuration_platform": "ARM64",
+    "xcode_configuration_platform": "arm64"
+  },
+  "variables": {
+    "arm_fpu": "neon",
+    "asan": 0,
+    "coverage": "false",
+    "dcheck_always_on": 0,
+    "debug_nghttp2": "false",
+    "debug_node": "false",
+    "enable_lto": "false",
+    "enable_pgo_generate": "false",
+    "enable_pgo_use": "false",
+    "error_on_warn": "false",
+    "force_dynamic_crt": 0,
+    "host_arch": "arm64",
+    "icu_data_in": "../../deps/icu-tmp/icudt75l.dat",
+    "icu_endianness": "l",
+    "icu_gyp_path": "tools/icu/icu-generic.gyp",
+    "icu_path": "deps/icu-small",
+    "icu_small": "false",
+    "icu_ver_major": "75",
+    "is_debug": 0,
+    "libdir": "lib",
+    "llvm_version": "13.0",
+    "napi_build_version": "9",
+    "node_builtin_shareable_builtins": [
+      "deps/cjs-module-lexer/lexer.js",
+      "deps/cjs-module-lexer/dist/lexer.js",
+      "deps/undici/undici.js"
+    ],
+    "node_byteorder": "little",
+    "node_debug_lib": "false",
+    "node_enable_d8": "false",
+    "node_enable_v8_vtunejit": "false",
+    "node_fipsinstall": "false",
+    "node_install_corepack": "true",
+    "node_install_npm": "true",
+    "node_library_files": [
+      "lib/_http_agent.js",
+      "lib/_http_client.js",
+      "lib/_http_common.js",
+      "lib/_http_incoming.js",
+      "lib/_http_outgoing.js",
+      "lib/_http_server.js",
+      "lib/_stream_duplex.js",
+      "lib/_stream_passthrough.js",
+      "lib/_stream_readable.js",
+      "lib/_stream_transform.js",
+      "lib/_stream_wrap.js",
+      "lib/_stream_writable.js",
+      "lib/_tls_common.js",
+      "lib/_tls_wrap.js",
+      "lib/assert.js",
+      "lib/assert/strict.js",
+      "lib/async_hooks.js",
+      "lib/buffer.js",
+      "lib/child_process.js",
+      "lib/cluster.js",
+      "lib/console.js",
+      "lib/constants.js",
+      "lib/crypto.js",
+      "lib/dgram.js",
+      "lib/diagnostics_channel.js",
+      "lib/dns.js",
+      "lib/dns/promises.js",
+      "lib/domain.js",
+      "lib/events.js",
+      "lib/fs.js",
+      "lib/fs/promises.js",
+      "lib/http.js",
+      "lib/http2.js",
+      "lib/https.js",
+      "lib/inspector.js",
+      "lib/inspector/promises.js",
+      "lib/internal/abort_controller.js",
+      "lib/internal/assert.js",
+      "lib/internal/assert/assertion_error.js",
+      "lib/internal/assert/calltracker.js",
+      "lib/internal/async_hooks.js",
+      "lib/internal/blob.js",
+      "lib/internal/blocklist.js",
+      "lib/internal/bootstrap/node.js",
+      "lib/internal/bootstrap/realm.js",
+      "lib/internal/bootstrap/shadow_realm.js",
+      "lib/internal/bootstrap/switches/does_not_own_process_state.js",
+      "lib/internal/bootstrap/switches/does_own_process_state.js",
+      "lib/internal/bootstrap/switches/is_main_thread.js",
+      "lib/internal/bootstrap/switches/is_not_main_thread.js",
+      "lib/internal/bootstrap/web/exposed-wildcard.js",
+      "lib/internal/bootstrap/web/exposed-window-or-worker.js",
+      "lib/internal/buffer.js",
+      "lib/internal/child_process.js",
+      "lib/internal/child_process/serialization.js",
+      "lib/internal/cli_table.js",
+      "lib/internal/cluster/child.js",
+      "lib/internal/cluster/primary.js",
+      "lib/internal/cluster/round_robin_handle.js",
+      "lib/internal/cluster/shared_handle.js",
+      "lib/internal/cluster/utils.js",
+      "lib/internal/cluster/worker.js",
+      "lib/internal/console/constructor.js",
+      "lib/internal/console/global.js",
+      "lib/internal/constants.js",
+      "lib/internal/crypto/aes.js",
+      "lib/internal/crypto/certificate.js",
+      "lib/internal/crypto/cfrg.js",
+      "lib/internal/crypto/cipher.js",
+      "lib/internal/crypto/diffiehellman.js",
+      "lib/internal/crypto/ec.js",
+      "lib/internal/crypto/hash.js",
+      "lib/internal/crypto/hashnames.js",
+      "lib/internal/crypto/hkdf.js",
+      "lib/internal/crypto/keygen.js",
+      "lib/internal/crypto/keys.js",
+      "lib/internal/crypto/mac.js",
+      "lib/internal/crypto/pbkdf2.js",
+      "lib/internal/crypto/random.js",
+      "lib/internal/crypto/rsa.js",
+      "lib/internal/crypto/scrypt.js",
+      "lib/internal/crypto/sig.js",
+      "lib/internal/crypto/util.js",
+      "lib/internal/crypto/webcrypto.js",
+      "lib/internal/crypto/webidl.js",
+      "lib/internal/crypto/x509.js",
+      "lib/internal/debugger/inspect.js",
+      "lib/internal/debugger/inspect_client.js",
+      "lib/internal/debugger/inspect_repl.js",
+      "lib/internal/dgram.js",
+      "lib/internal/dns/callback_resolver.js",
+      "lib/internal/dns/promises.js",
+      "lib/internal/dns/utils.js",
+      "lib/internal/encoding.js",
+      "lib/internal/error_serdes.js",
+      "lib/internal/errors.js",
+      "lib/internal/event_target.js",
+      "lib/internal/events/abort_listener.js",
+      "lib/internal/events/symbols.js",
+      "lib/internal/file.js",
+      "lib/internal/fixed_queue.js",
+      "lib/internal/freelist.js",
+      "lib/internal/freeze_intrinsics.js",
+      "lib/internal/fs/cp/cp-sync.js",
+      "lib/internal/fs/cp/cp.js",
+      "lib/internal/fs/dir.js",
+      "lib/internal/fs/promises.js",
+      "lib/internal/fs/read/context.js",
+      "lib/internal/fs/recursive_watch.js",
+      "lib/internal/fs/rimraf.js",
+      "lib/internal/fs/streams.js",
+      "lib/internal/fs/sync_write_stream.js",
+      "lib/internal/fs/utils.js",
+      "lib/internal/fs/watchers.js",
+      "lib/internal/heap_utils.js",
+      "lib/internal/histogram.js",
+      "lib/internal/http.js",
+      "lib/internal/http2/compat.js",
+      "lib/internal/http2/core.js",
+      "lib/internal/http2/util.js",
+      "lib/internal/idna.js",
+      "lib/internal/inspector_async_hook.js",
+      "lib/internal/js_stream_socket.js",
+      "lib/internal/legacy/processbinding.js",
+      "lib/internal/linkedlist.js",
+      "lib/internal/main/check_syntax.js",
+      "lib/internal/main/embedding.js",
+      "lib/internal/main/eval_stdin.js",
+      "lib/internal/main/eval_string.js",
+      "lib/internal/main/inspect.js",
+      "lib/internal/main/mksnapshot.js",
+      "lib/internal/main/print_help.js",
+      "lib/internal/main/prof_process.js",
+      "lib/internal/main/repl.js",
+      "lib/internal/main/run_main_module.js",
+      "lib/internal/main/test_runner.js",
+      "lib/internal/main/watch_mode.js",
+      "lib/internal/main/worker_thread.js",
+      "lib/internal/mime.js",
+      "lib/internal/modules/cjs/loader.js",
+      "lib/internal/modules/esm/assert.js",
+      "lib/internal/modules/esm/create_dynamic_module.js",
+      "lib/internal/modules/esm/fetch_module.js",
+      "lib/internal/modules/esm/formats.js",
+      "lib/internal/modules/esm/get_format.js",
+      "lib/internal/modules/esm/hooks.js",
+      "lib/internal/modules/esm/initialize_import_meta.js",
+      "lib/internal/modules/esm/load.js",
+      "lib/internal/modules/esm/loader.js",
+      "lib/internal/modules/esm/module_job.js",
+      "lib/internal/modules/esm/module_map.js",
+      "lib/internal/modules/esm/package_config.js",
+      "lib/internal/modules/esm/resolve.js",
+      "lib/internal/modules/esm/shared_constants.js",
+      "lib/internal/modules/esm/translators.js",
+      "lib/internal/modules/esm/utils.js",
+      "lib/internal/modules/esm/worker.js",
+      "lib/internal/modules/helpers.js",
+      "lib/internal/modules/package_json_reader.js",
+      "lib/internal/modules/run_main.js",
+      "lib/internal/navigator.js",
+      "lib/internal/net.js",
+      "lib/internal/options.js",
+      "lib/internal/per_context/domexception.js",
+      "lib/internal/per_context/messageport.js",
+      "lib/internal/per_context/primordials.js",
+      "lib/internal/perf/event_loop_delay.js",
+      "lib/internal/perf/event_loop_utilization.js",
+      "lib/internal/perf/nodetiming.js",
+      "lib/internal/perf/observe.js",
+      "lib/internal/perf/performance.js",
+      "lib/internal/perf/performance_entry.js",
+      "lib/internal/perf/resource_timing.js",
+      "lib/internal/perf/timerify.js",
+      "lib/internal/perf/usertiming.js",
+      "lib/internal/perf/utils.js",
+      "lib/internal/policy/manifest.js",
+      "lib/internal/policy/sri.js",
+      "lib/internal/priority_queue.js",
+      "lib/internal/process/execution.js",
+      "lib/internal/process/per_thread.js",
+      "lib/internal/process/permission.js",
+      "lib/internal/process/policy.js",
+      "lib/internal/process/pre_execution.js",
+      "lib/internal/process/promises.js",
+      "lib/internal/process/report.js",
+      "lib/internal/process/signal.js",
+      "lib/internal/process/task_queues.js",
+      "lib/internal/process/warning.js",
+      "lib/internal/process/worker_thread_only.js",
+      "lib/internal/promise_hooks.js",
+      "lib/internal/querystring.js",
+      "lib/internal/readline/callbacks.js",
+      "lib/internal/readline/emitKeypressEvents.js",
+      "lib/internal/readline/interface.js",
+      "lib/internal/readline/promises.js",
+      "lib/internal/readline/utils.js",
+      "lib/internal/repl.js",
+      "lib/internal/repl/await.js",
+      "lib/internal/repl/history.js",
+      "lib/internal/repl/utils.js",
+      "lib/internal/socket_list.js",
+      "lib/internal/socketaddress.js",
+      "lib/internal/source_map/prepare_stack_trace.js",
+      "lib/internal/source_map/source_map.js",
+      "lib/internal/source_map/source_map_cache.js",
+      "lib/internal/stream_base_commons.js",
+      "lib/internal/streams/add-abort-signal.js",
+      "lib/internal/streams/compose.js",
+      "lib/internal/streams/destroy.js",
+      "lib/internal/streams/duplex.js",
+      "lib/internal/streams/duplexify.js",
+      "lib/internal/streams/end-of-stream.js",
+      "lib/internal/streams/from.js",
+      "lib/internal/streams/lazy_transform.js",
+      "lib/internal/streams/legacy.js",
+      "lib/internal/streams/operators.js",
+      "lib/internal/streams/passthrough.js",
+      "lib/internal/streams/pipeline.js",
+      "lib/internal/streams/readable.js",
+      "lib/internal/streams/state.js",
+      "lib/internal/streams/transform.js",
+      "lib/internal/streams/utils.js",
+      "lib/internal/streams/writable.js",
+      "lib/internal/test/binding.js",
+      "lib/internal/test/transfer.js",
+      "lib/internal/test_runner/coverage.js",
+      "lib/internal/test_runner/harness.js",
+      "lib/internal/test_runner/mock/mock.js",
+      "lib/internal/test_runner/mock/mock_timers.js",
+      "lib/internal/test_runner/reporter/dot.js",
+      "lib/internal/test_runner/reporter/junit.js",
+      "lib/internal/test_runner/reporter/lcov.js",
+      "lib/internal/test_runner/reporter/spec.js",
+      "lib/internal/test_runner/reporter/tap.js",
+      "lib/internal/test_runner/reporter/v8-serializer.js",
+      "lib/internal/test_runner/runner.js",
+      "lib/internal/test_runner/test.js",
+      "lib/internal/test_runner/tests_stream.js",
+      "lib/internal/test_runner/utils.js",
+      "lib/internal/timers.js",
+      "lib/internal/tls/secure-context.js",
+      "lib/internal/tls/secure-pair.js",
+      "lib/internal/trace_events_async_hooks.js",
+      "lib/internal/tty.js",
+      "lib/internal/url.js",
+      "lib/internal/util.js",
+      "lib/internal/util/colors.js",
+      "lib/internal/util/comparisons.js",
+      "lib/internal/util/debuglog.js",
+      "lib/internal/util/embedding.js",
+      "lib/internal/util/inspect.js",
+      "lib/internal/util/inspector.js",
+      "lib/internal/util/iterable_weak_map.js",
+      "lib/internal/util/parse_args/parse_args.js",
+      "lib/internal/util/parse_args/utils.js",
+      "lib/internal/util/types.js",
+      "lib/internal/v8/startup_snapshot.js",
+      "lib/internal/v8_prof_polyfill.js",
+      "lib/internal/v8_prof_processor.js",
+      "lib/internal/validators.js",
+      "lib/internal/vm.js",
+      "lib/internal/vm/module.js",
+      "lib/internal/wasm_web_api.js",
+      "lib/internal/watch_mode/files_watcher.js",
+      "lib/internal/watchdog.js",
+      "lib/internal/webidl.js",
+      "lib/internal/webstreams/adapters.js",
+      "lib/internal/webstreams/compression.js",
+      "lib/internal/webstreams/encoding.js",
+      "lib/internal/webstreams/queuingstrategies.js",
+      "lib/internal/webstreams/readablestream.js",
+      "lib/internal/webstreams/transfer.js",
+      "lib/internal/webstreams/transformstream.js",
+      "lib/internal/webstreams/util.js",
+      "lib/internal/webstreams/writablestream.js",
+      "lib/internal/worker.js",
+      "lib/internal/worker/io.js",
+      "lib/internal/worker/js_transferable.js",
+      "lib/module.js",
+      "lib/net.js",
+      "lib/os.js",
+      "lib/path.js",
+      "lib/path/posix.js",
+      "lib/path/win32.js",
+      "lib/perf_hooks.js",
+      "lib/process.js",
+      "lib/punycode.js",
+      "lib/querystring.js",
+      "lib/readline.js",
+      "lib/readline/promises.js",
+      "lib/repl.js",
+      "lib/sea.js",
+      "lib/stream.js",
+      "lib/stream/consumers.js",
+      "lib/stream/promises.js",
+      "lib/stream/web.js",
+      "lib/string_decoder.js",
+      "lib/sys.js",
+      "lib/test.js",
+      "lib/test/reporters.js",
+      "lib/timers.js",
+      "lib/timers/promises.js",
+      "lib/tls.js",
+      "lib/trace_events.js",
+      "lib/tty.js",
+      "lib/url.js",
+      "lib/util.js",
+      "lib/util/types.js",
+      "lib/v8.js",
+      "lib/vm.js",
+      "lib/wasi.js",
+      "lib/worker_threads.js",
+      "lib/zlib.js"
+    ],
+    "node_module_version": 115,
+    "node_no_browser_globals": "false",
+    "node_prefix": "/usr/local",
+    "node_release_urlbase": "https://nodejs.org/download/release/",
+    "node_shared": "false",
+    "node_shared_brotli": "false",
+    "node_shared_cares": "false",
+    "node_shared_http_parser": "false",
+    "node_shared_libuv": "false",
+    "node_shared_nghttp2": "false",
+    "node_shared_nghttp3": "false",
+    "node_shared_ngtcp2": "false",
+    "node_shared_openssl": "false",
+    "node_shared_zlib": "false",
+    "node_tag": "",
+    "node_target_type": "executable",
+    "node_use_bundled_v8": "true",
+    "node_use_node_code_cache": "true",
+    "node_use_node_snapshot": "true",
+    "node_use_openssl": "true",
+    "node_use_v8_platform": "true",
+    "node_with_ltcg": "false",
+    "node_without_node_options": "false",
+    "node_write_snapshot_as_array_literals": "false",
+    "openssl_is_fips": "false",
+    "openssl_quic": "true",
+    "ossfuzz": "false",
+    "shlib_suffix": "115.dylib",
+    "single_executable_application": "true",
+    "target_arch": "arm64",
+    "ubsan": 0,
+    "use_prefix_to_find_headers": "false",
+    "v8_enable_31bit_smis_on_64bit_arch": 0,
+    "v8_enable_extensible_ro_snapshot": 0,
+    "v8_enable_gdbjit": 0,
+    "v8_enable_hugepage": 0,
+    "v8_enable_i18n_support": 1,
+    "v8_enable_inspector": 1,
+    "v8_enable_javascript_promise_hooks": 1,
+    "v8_enable_lite_mode": 0,
+    "v8_enable_maglev": 0,
+    "v8_enable_object_print": 1,
+    "v8_enable_pointer_compression": 0,
+    "v8_enable_shared_ro_heap": 1,
+    "v8_enable_v8_checks": 0,
+    "v8_enable_webassembly": 1,
+    "v8_no_strict_aliasing": 1,
+    "v8_optimized_debug": 1,
+    "v8_promise_internal_field_count": 1,
+    "v8_random_seed": 0,
+    "v8_trace_maps": 0,
+    "v8_use_siphash": 1,
+    "want_separate_host_toolset": 0,
+    "xcode_version": "13.0",
+    "nodedir": "/Users/eli/Library/Caches/node-gyp/20.13.1",
+    "python": "/Applications/Xcode.app/Contents/Developer/usr/bin/python3",
+    "standalone_static_library": 1,
+    "local_prefix": "/Users/eli/Code/institute/tour/tree-sitter/dsk/test-build/generated/js/",
+    "yes": "true",
+    "user_agent": "bun/1.1.29 npm/? node/v22.6.0 darwin arm64"
+  }
+}
diff --git a/tree-sitter/dsk/test-build/generated/js/build/gyp-mac-tool b/tree-sitter/dsk/test-build/generated/js/build/gyp-mac-tool
new file mode 100755
index 0000000..ffef860
--- /dev/null
+++ b/tree-sitter/dsk/test-build/generated/js/build/gyp-mac-tool
@@ -0,0 +1,772 @@
+#!/usr/bin/env python3
+# Generated by gyp. Do not edit.
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utility functions to perform Xcode-style build steps.
+
+These functions are executed via gyp-mac-tool when using the Makefile generator.
+"""
+
+
+import fcntl
+import fnmatch
+import glob
+import json
+import os
+import plistlib
+import re
+import shutil
+import struct
+import subprocess
+import sys
+import tempfile
+
+
+def main(args):
+    executor = MacTool()
+    exit_code = executor.Dispatch(args)
+    if exit_code is not None:
+        sys.exit(exit_code)
+
+
+class MacTool:
+    """This class performs all the Mac tooling steps. The methods can either be
+  executed directly, or dispatched from an argument list."""
+
+    def Dispatch(self, args):
+        """Dispatches a string command to a method."""
+        if len(args) < 1:
+            raise Exception("Not enough arguments")
+
+        method = "Exec%s" % self._CommandifyName(args[0])
+        return getattr(self, method)(*args[1:])
+
+    def _CommandifyName(self, name_string):
+        """Transforms a tool name like copy-info-plist to CopyInfoPlist"""
+        return name_string.title().replace("-", "")
+
+    def ExecCopyBundleResource(self, source, dest, convert_to_binary):
+        """Copies a resource file to the bundle/Resources directory, performing any
+    necessary compilation on each resource."""
+        convert_to_binary = convert_to_binary == "True"
+        extension = os.path.splitext(source)[1].lower()
+        if os.path.isdir(source):
+            # Copy tree.
+            # TODO(thakis): This copies file attributes like mtime, while the
+            # single-file branch below doesn't. This should probably be changed to
+            # be consistent with the single-file branch.
+            if os.path.exists(dest):
+                shutil.rmtree(dest)
+            shutil.copytree(source, dest)
+        elif extension == ".xib":
+            return self._CopyXIBFile(source, dest)
+        elif extension == ".storyboard":
+            return self._CopyXIBFile(source, dest)
+        elif extension == ".strings" and not convert_to_binary:
+            self._CopyStringsFile(source, dest)
+        else:
+            if os.path.exists(dest):
+                os.unlink(dest)
+            shutil.copy(source, dest)
+
+        if convert_to_binary and extension in (".plist", ".strings"):
+            self._ConvertToBinary(dest)
+
+    def _CopyXIBFile(self, source, dest):
+        """Compiles a XIB file with ibtool into a binary plist in the bundle."""
+
+        # ibtool sometimes crashes with relative paths. See crbug.com/314728.
+        base = os.path.dirname(os.path.realpath(__file__))
+        if os.path.relpath(source):
+            source = os.path.join(base, source)
+        if os.path.relpath(dest):
+            dest = os.path.join(base, dest)
+
+        args = ["xcrun", "ibtool", "--errors", "--warnings", "--notices"]
+
+        if os.environ["XCODE_VERSION_ACTUAL"] > "0700":
+            args.extend(["--auto-activate-custom-fonts"])
+            if "IPHONEOS_DEPLOYMENT_TARGET" in os.environ:
+                args.extend(
+                    [
+                        "--target-device",
+                        "iphone",
+                        "--target-device",
+                        "ipad",
+                        "--minimum-deployment-target",
+                        os.environ["IPHONEOS_DEPLOYMENT_TARGET"],
+                    ]
+                )
+            else:
+                args.extend(
+                    [
+                        "--target-device",
+                        "mac",
+                        "--minimum-deployment-target",
+                        os.environ["MACOSX_DEPLOYMENT_TARGET"],
+                    ]
+                )
+
+        args.extend(
+            ["--output-format", "human-readable-text", "--compile", dest, source]
+        )
+
+        ibtool_section_re = re.compile(r"/\*.*\*/")
+        ibtool_re = re.compile(r".*note:.*is clipping its content")
+        try:
+            stdout = subprocess.check_output(args)
+        except subprocess.CalledProcessError as e:
+            print(e.output)
+            raise
+        current_section_header = None
+        for line in stdout.splitlines():
+            if ibtool_section_re.match(line):
+                current_section_header = line
+            elif not ibtool_re.match(line):
+                if current_section_header:
+                    print(current_section_header)
+                    current_section_header = None
+                print(line)
+        return 0
+
+    def _ConvertToBinary(self, dest):
+        subprocess.check_call(
+            ["xcrun", "plutil", "-convert", "binary1", "-o", dest, dest]
+        )
+
+    def _CopyStringsFile(self, source, dest):
+        """Copies a .strings file using iconv to reconvert the input into UTF-16."""
+        input_code = self._DetectInputEncoding(source) or "UTF-8"
+
+        # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call
+        # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints
+        #     CFPropertyListCreateFromXMLData(): Old-style plist parser: missing
+        #     semicolon in dictionary.
+        # on invalid files. Do the same kind of validation.
+        import CoreFoundation
+
+        with open(source, "rb") as in_file:
+            s = in_file.read()
+        d = CoreFoundation.CFDataCreate(None, s, len(s))
+        _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None)
+        if error:
+            return
+
+        with open(dest, "wb") as fp:
+            fp.write(s.decode(input_code).encode("UTF-16"))
+
+    def _DetectInputEncoding(self, file_name):
+        """Reads the first few bytes from file_name and tries to guess the text
+    encoding. Returns None as a guess if it can't detect it."""
+        with open(file_name, "rb") as fp:
+            try:
+                header = fp.read(3)
+            except Exception:
+                return None
+        if header.startswith(b"\xFE\xFF"):
+            return "UTF-16"
+        elif header.startswith(b"\xFF\xFE"):
+            return "UTF-16"
+        elif header.startswith(b"\xEF\xBB\xBF"):
+            return "UTF-8"
+        else:
+            return None
+
+    def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys):
+        """Copies the |source| Info.plist to the destination directory |dest|."""
+        # Read the source Info.plist into memory.
+        with open(source) as fd:
+            lines = fd.read()
+
+        # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild).
+        plist = plistlib.readPlistFromString(lines)
+        if keys:
+            plist.update(json.loads(keys[0]))
+        lines = plistlib.writePlistToString(plist)
+
+        # Go through all the environment variables and replace them as variables in
+        # the file.
+        IDENT_RE = re.compile(r"[_/\s]")
+        for key in os.environ:
+            if key.startswith("_"):
+                continue
+            evar = "${%s}" % key
+            evalue = os.environ[key]
+            lines = lines.replace(lines, evar, evalue)
+
+            # Xcode supports various suffices on environment variables, which are
+            # all undocumented. :rfc1034identifier is used in the standard project
+            # template these days, and :identifier was used earlier. They are used to
+            # convert non-url characters into things that look like valid urls --
+            # except that the replacement character for :identifier, '_' isn't valid
+            # in a URL either -- oops, hence :rfc1034identifier was born.
+            evar = "${%s:identifier}" % key
+            evalue = IDENT_RE.sub("_", os.environ[key])
+            lines = lines.replace(lines, evar, evalue)
+
+            evar = "${%s:rfc1034identifier}" % key
+            evalue = IDENT_RE.sub("-", os.environ[key])
+            lines = lines.replace(lines, evar, evalue)
+
+        # Remove any keys with values that haven't been replaced.
+        lines = lines.splitlines()
+        for i in range(len(lines)):
+            if lines[i].strip().startswith("<string>${"):
+                lines[i] = None
+                lines[i - 1] = None
+        lines = "\n".join(line for line in lines if line is not None)
+
+        # Write out the file with variables replaced.
+        with open(dest, "w") as fd:
+            fd.write(lines)
+
+        # Now write out PkgInfo file now that the Info.plist file has been
+        # "compiled".
+        self._WritePkgInfo(dest)
+
+        if convert_to_binary == "True":
+            self._ConvertToBinary(dest)
+
+    def _WritePkgInfo(self, info_plist):
+        """This writes the PkgInfo file from the data stored in Info.plist."""
+        plist = plistlib.readPlist(info_plist)
+        if not plist:
+            return
+
+        # Only create PkgInfo for executable types.
+        package_type = plist["CFBundlePackageType"]
+        if package_type != "APPL":
+            return
+
+        # The format of PkgInfo is eight characters, representing the bundle type
+        # and bundle signature, each four characters. If that is missing, four
+        # '?' characters are used instead.
+        signature_code = plist.get("CFBundleSignature", "????")
+        if len(signature_code) != 4:  # Wrong length resets everything, too.
+            signature_code = "?" * 4
+
+        dest = os.path.join(os.path.dirname(info_plist), "PkgInfo")
+        with open(dest, "w") as fp:
+            fp.write(f"{package_type}{signature_code}")
+
+    def ExecFlock(self, lockfile, *cmd_list):
+        """Emulates the most basic behavior of Linux's flock(1)."""
+        # Rely on exception handling to report errors.
+        fd = os.open(lockfile, os.O_RDONLY | os.O_NOCTTY | os.O_CREAT, 0o666)
+        fcntl.flock(fd, fcntl.LOCK_EX)
+        return subprocess.call(cmd_list)
+
+    def ExecFilterLibtool(self, *cmd_list):
+        """Calls libtool and filters out '/path/to/libtool: file: foo.o has no
+    symbols'."""
+        libtool_re = re.compile(
+            r"^.*libtool: (?:for architecture: \S* )?" r"file: .* has no symbols$"
+        )
+        libtool_re5 = re.compile(
+            r"^.*libtool: warning for library: "
+            + r".* the table of contents is empty "
+            + r"\(no object file members in the library define global symbols\)$"
+        )
+        env = os.environ.copy()
+        # Ref:
+        # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c
+        # The problem with this flag is that it resets the file mtime on the file to
+        # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone.
+        env["ZERO_AR_DATE"] = "1"
+        libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env)
+        err = libtoolout.communicate()[1].decode("utf-8")
+        for line in err.splitlines():
+            if not libtool_re.match(line) and not libtool_re5.match(line):
+                print(line, file=sys.stderr)
+        # Unconditionally touch the output .a file on the command line if present
+        # and the command succeeded. A bit hacky.
+        if not libtoolout.returncode:
+            for i in range(len(cmd_list) - 1):
+                if cmd_list[i] == "-o" and cmd_list[i + 1].endswith(".a"):
+                    os.utime(cmd_list[i + 1], None)
+                    break
+        return libtoolout.returncode
+
+    def ExecPackageIosFramework(self, framework):
+        # Find the name of the binary based on the part before the ".framework".
+        binary = os.path.basename(framework).split(".")[0]
+        module_path = os.path.join(framework, "Modules")
+        if not os.path.exists(module_path):
+            os.mkdir(module_path)
+        module_template = (
+            "framework module %s {\n"
+            '  umbrella header "%s.h"\n'
+            "\n"
+            "  export *\n"
+            "  module * { export * }\n"
+            "}\n" % (binary, binary)
+        )
+
+        with open(os.path.join(module_path, "module.modulemap"), "w") as module_file:
+            module_file.write(module_template)
+
+    def ExecPackageFramework(self, framework, version):
+        """Takes a path to Something.framework and the Current version of that and
+    sets up all the symlinks."""
+        # Find the name of the binary based on the part before the ".framework".
+        binary = os.path.basename(framework).split(".")[0]
+
+        CURRENT = "Current"
+        RESOURCES = "Resources"
+        VERSIONS = "Versions"
+
+        if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)):
+            # Binary-less frameworks don't seem to contain symlinks (see e.g.
+            # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle).
+            return
+
+        # Move into the framework directory to set the symlinks correctly.
+        pwd = os.getcwd()
+        os.chdir(framework)
+
+        # Set up the Current version.
+        self._Relink(version, os.path.join(VERSIONS, CURRENT))
+
+        # Set up the root symlinks.
+        self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary)
+        self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES)
+
+        # Back to where we were before!
+        os.chdir(pwd)
+
+    def _Relink(self, dest, link):
+        """Creates a symlink to |dest| named |link|. If |link| already exists,
+    it is overwritten."""
+        if os.path.lexists(link):
+            os.remove(link)
+        os.symlink(dest, link)
+
+    def ExecCompileIosFrameworkHeaderMap(self, out, framework, *all_headers):
+        framework_name = os.path.basename(framework).split(".")[0]
+        all_headers = [os.path.abspath(header) for header in all_headers]
+        filelist = {}
+        for header in all_headers:
+            filename = os.path.basename(header)
+            filelist[filename] = header
+            filelist[os.path.join(framework_name, filename)] = header
+        WriteHmap(out, filelist)
+
+    def ExecCopyIosFrameworkHeaders(self, framework, *copy_headers):
+        header_path = os.path.join(framework, "Headers")
+        if not os.path.exists(header_path):
+            os.makedirs(header_path)
+        for header in copy_headers:
+            shutil.copy(header, os.path.join(header_path, os.path.basename(header)))
+
+    def ExecCompileXcassets(self, keys, *inputs):
+        """Compiles multiple .xcassets files into a single .car file.
+
+    This invokes 'actool' to compile all the inputs .xcassets files. The
+    |keys| arguments is a json-encoded dictionary of extra arguments to
+    pass to 'actool' when the asset catalogs contains an application icon
+    or a launch image.
+
+    Note that 'actool' does not create the Assets.car file if the asset
+    catalogs does not contains imageset.
+    """
+        command_line = [
+            "xcrun",
+            "actool",
+            "--output-format",
+            "human-readable-text",
+            "--compress-pngs",
+            "--notices",
+            "--warnings",
+            "--errors",
+        ]
+        is_iphone_target = "IPHONEOS_DEPLOYMENT_TARGET" in os.environ
+        if is_iphone_target:
+            platform = os.environ["CONFIGURATION"].split("-")[-1]
+            if platform not in ("iphoneos", "iphonesimulator"):
+                platform = "iphonesimulator"
+            command_line.extend(
+                [
+                    "--platform",
+                    platform,
+                    "--target-device",
+                    "iphone",
+                    "--target-device",
+                    "ipad",
+                    "--minimum-deployment-target",
+                    os.environ["IPHONEOS_DEPLOYMENT_TARGET"],
+                    "--compile",
+                    os.path.abspath(os.environ["CONTENTS_FOLDER_PATH"]),
+                ]
+            )
+        else:
+            command_line.extend(
+                [
+                    "--platform",
+                    "macosx",
+                    "--target-device",
+                    "mac",
+                    "--minimum-deployment-target",
+                    os.environ["MACOSX_DEPLOYMENT_TARGET"],
+                    "--compile",
+                    os.path.abspath(os.environ["UNLOCALIZED_RESOURCES_FOLDER_PATH"]),
+                ]
+            )
+        if keys:
+            keys = json.loads(keys)
+            for key, value in keys.items():
+                arg_name = "--" + key
+                if isinstance(value, bool):
+                    if value:
+                        command_line.append(arg_name)
+                elif isinstance(value, list):
+                    for v in value:
+                        command_line.append(arg_name)
+                        command_line.append(str(v))
+                else:
+                    command_line.append(arg_name)
+                    command_line.append(str(value))
+        # Note: actool crashes if inputs path are relative, so use os.path.abspath
+        # to get absolute path name for inputs.
+        command_line.extend(map(os.path.abspath, inputs))
+        subprocess.check_call(command_line)
+
+    def ExecMergeInfoPlist(self, output, *inputs):
+        """Merge multiple .plist files into a single .plist file."""
+        merged_plist = {}
+        for path in inputs:
+            plist = self._LoadPlistMaybeBinary(path)
+            self._MergePlist(merged_plist, plist)
+        plistlib.writePlist(merged_plist, output)
+
+    def ExecCodeSignBundle(self, key, entitlements, provisioning, path, preserve):
+        """Code sign a bundle.
+
+    This function tries to code sign an iOS bundle, following the same
+    algorithm as Xcode:
+      1. pick the provisioning profile that best match the bundle identifier,
+         and copy it into the bundle as embedded.mobileprovision,
+      2. copy Entitlements.plist from user or SDK next to the bundle,
+      3. code sign the bundle.
+    """
+        substitutions, overrides = self._InstallProvisioningProfile(
+            provisioning, self._GetCFBundleIdentifier()
+        )
+        entitlements_path = self._InstallEntitlements(
+            entitlements, substitutions, overrides
+        )
+
+        args = ["codesign", "--force", "--sign", key]
+        if preserve == "True":
+            args.extend(["--deep", "--preserve-metadata=identifier,entitlements"])
+        else:
+            args.extend(["--entitlements", entitlements_path])
+        args.extend(["--timestamp=none", path])
+        subprocess.check_call(args)
+
+    def _InstallProvisioningProfile(self, profile, bundle_identifier):
+        """Installs embedded.mobileprovision into the bundle.
+
+    Args:
+      profile: string, optional, short name of the .mobileprovision file
+        to use, if empty or the file is missing, the best file installed
+        will be used
+      bundle_identifier: string, value of CFBundleIdentifier from Info.plist
+
+    Returns:
+      A tuple containing two dictionary: variables substitutions and values
+      to overrides when generating the entitlements file.
+    """
+        source_path, provisioning_data, team_id = self._FindProvisioningProfile(
+            profile, bundle_identifier
+        )
+        target_path = os.path.join(
+            os.environ["BUILT_PRODUCTS_DIR"],
+            os.environ["CONTENTS_FOLDER_PATH"],
+            "embedded.mobileprovision",
+        )
+        shutil.copy2(source_path, target_path)
+        substitutions = self._GetSubstitutions(bundle_identifier, team_id + ".")
+        return substitutions, provisioning_data["Entitlements"]
+
+    def _FindProvisioningProfile(self, profile, bundle_identifier):
+        """Finds the .mobileprovision file to use for signing the bundle.
+
+    Checks all the installed provisioning profiles (or if the user specified
+    the PROVISIONING_PROFILE variable, only consult it) and select the most
+    specific that correspond to the bundle identifier.
+
+    Args:
+      profile: string, optional, short name of the .mobileprovision file
+        to use, if empty or the file is missing, the best file installed
+        will be used
+      bundle_identifier: string, value of CFBundleIdentifier from Info.plist
+
+    Returns:
+      A tuple of the path to the selected provisioning profile, the data of
+      the embedded plist in the provisioning profile and the team identifier
+      to use for code signing.
+
+    Raises:
+      SystemExit: if no .mobileprovision can be used to sign the bundle.
+    """
+        profiles_dir = os.path.join(
+            os.environ["HOME"], "Library", "MobileDevice", "Provisioning Profiles"
+        )
+        if not os.path.isdir(profiles_dir):
+            print(
+                "cannot find mobile provisioning for %s" % (bundle_identifier),
+                file=sys.stderr,
+            )
+            sys.exit(1)
+        provisioning_profiles = None
+        if profile:
+            profile_path = os.path.join(profiles_dir, profile + ".mobileprovision")
+            if os.path.exists(profile_path):
+                provisioning_profiles = [profile_path]
+        if not provisioning_profiles:
+            provisioning_profiles = glob.glob(
+                os.path.join(profiles_dir, "*.mobileprovision")
+            )
+        valid_provisioning_profiles = {}
+        for profile_path in provisioning_profiles:
+            profile_data = self._LoadProvisioningProfile(profile_path)
+            app_id_pattern = profile_data.get("Entitlements", {}).get(
+                "application-identifier", ""
+            )
+            for team_identifier in profile_data.get("TeamIdentifier", []):
+                app_id = f"{team_identifier}.{bundle_identifier}"
+                if fnmatch.fnmatch(app_id, app_id_pattern):
+                    valid_provisioning_profiles[app_id_pattern] = (
+                        profile_path,
+                        profile_data,
+                        team_identifier,
+                    )
+        if not valid_provisioning_profiles:
+            print(
+                "cannot find mobile provisioning for %s" % (bundle_identifier),
+                file=sys.stderr,
+            )
+            sys.exit(1)
+        # If the user has multiple provisioning profiles installed that can be
+        # used for ${bundle_identifier}, pick the most specific one (ie. the
+        # provisioning profile whose pattern is the longest).
+        selected_key = max(valid_provisioning_profiles, key=lambda v: len(v))
+        return valid_provisioning_profiles[selected_key]
+
+    def _LoadProvisioningProfile(self, profile_path):
+        """Extracts the plist embedded in a provisioning profile.
+
+    Args:
+      profile_path: string, path to the .mobileprovision file
+
+    Returns:
+      Content of the plist embedded in the provisioning profile as a dictionary.
+    """
+        with tempfile.NamedTemporaryFile() as temp:
+            subprocess.check_call(
+                ["security", "cms", "-D", "-i", profile_path, "-o", temp.name]
+            )
+            return self._LoadPlistMaybeBinary(temp.name)
+
+    def _MergePlist(self, merged_plist, plist):
+        """Merge |plist| into |merged_plist|."""
+        for key, value in plist.items():
+            if isinstance(value, dict):
+                merged_value = merged_plist.get(key, {})
+                if isinstance(merged_value, dict):
+                    self._MergePlist(merged_value, value)
+                    merged_plist[key] = merged_value
+                else:
+                    merged_plist[key] = value
+            else:
+                merged_plist[key] = value
+
+    def _LoadPlistMaybeBinary(self, plist_path):
+        """Loads into a memory a plist possibly encoded in binary format.
+
+    This is a wrapper around plistlib.readPlist that tries to convert the
+    plist to the XML format if it can't be parsed (assuming that it is in
+    the binary format).
+
+    Args:
+      plist_path: string, path to a plist file, in XML or binary format
+
+    Returns:
+      Content of the plist as a dictionary.
+    """
+        try:
+            # First, try to read the file using plistlib that only supports XML,
+            # and if an exception is raised, convert a temporary copy to XML and
+            # load that copy.
+            return plistlib.readPlist(plist_path)
+        except Exception:
+            pass
+        with tempfile.NamedTemporaryFile() as temp:
+            shutil.copy2(plist_path, temp.name)
+            subprocess.check_call(["plutil", "-convert", "xml1", temp.name])
+            return plistlib.readPlist(temp.name)
+
+    def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix):
+        """Constructs a dictionary of variable substitutions for Entitlements.plist.
+
+    Args:
+      bundle_identifier: string, value of CFBundleIdentifier from Info.plist
+      app_identifier_prefix: string, value for AppIdentifierPrefix
+
+    Returns:
+      Dictionary of substitutions to apply when generating Entitlements.plist.
+    """
+        return {
+            "CFBundleIdentifier": bundle_identifier,
+            "AppIdentifierPrefix": app_identifier_prefix,
+        }
+
+    def _GetCFBundleIdentifier(self):
+        """Extracts CFBundleIdentifier value from Info.plist in the bundle.
+
+    Returns:
+      Value of CFBundleIdentifier in the Info.plist located in the bundle.
+    """
+        info_plist_path = os.path.join(
+            os.environ["TARGET_BUILD_DIR"], os.environ["INFOPLIST_PATH"]
+        )
+        info_plist_data = self._LoadPlistMaybeBinary(info_plist_path)
+        return info_plist_data["CFBundleIdentifier"]
+
+    def _InstallEntitlements(self, entitlements, substitutions, overrides):
+        """Generates and install the ${BundleName}.xcent entitlements file.
+
+    Expands variables "$(variable)" pattern in the source entitlements file,
+    add extra entitlements defined in the .mobileprovision file and the copy
+    the generated plist to "${BundlePath}.xcent".
+
+    Args:
+      entitlements: string, optional, path to the Entitlements.plist template
+        to use, defaults to "${SDKROOT}/Entitlements.plist"
+      substitutions: dictionary, variable substitutions
+      overrides: dictionary, values to add to the entitlements
+
+    Returns:
+      Path to the generated entitlements file.
+    """
+        source_path = entitlements
+        target_path = os.path.join(
+            os.environ["BUILT_PRODUCTS_DIR"], os.environ["PRODUCT_NAME"] + ".xcent"
+        )
+        if not source_path:
+            source_path = os.path.join(os.environ["SDKROOT"], "Entitlements.plist")
+        shutil.copy2(source_path, target_path)
+        data = self._LoadPlistMaybeBinary(target_path)
+        data = self._ExpandVariables(data, substitutions)
+        if overrides:
+            for key in overrides:
+                if key not in data:
+                    data[key] = overrides[key]
+        plistlib.writePlist(data, target_path)
+        return target_path
+
+    def _ExpandVariables(self, data, substitutions):
+        """Expands variables "$(variable)" in data.
+
+    Args:
+      data: object, can be either string, list or dictionary
+      substitutions: dictionary, variable substitutions to perform
+
+    Returns:
+      Copy of data where each references to "$(variable)" has been replaced
+      by the corresponding value found in substitutions, or left intact if
+      the key was not found.
+    """
+        if isinstance(data, str):
+            for key, value in substitutions.items():
+                data = data.replace("$(%s)" % key, value)
+            return data
+        if isinstance(data, list):
+            return [self._ExpandVariables(v, substitutions) for v in data]
+        if isinstance(data, dict):
+            return {k: self._ExpandVariables(data[k], substitutions) for k in data}
+        return data
+
+
+def NextGreaterPowerOf2(x):
+    return 2 ** (x).bit_length()
+
+
+def WriteHmap(output_name, filelist):
+    """Generates a header map based on |filelist|.
+
+  Per Mark Mentovai:
+    A header map is structured essentially as a hash table, keyed by names used
+    in #includes, and providing pathnames to the actual files.
+
+  The implementation below and the comment above comes from inspecting:
+    http://www.opensource.apple.com/source/distcc/distcc-2503/distcc_dist/include_server/headermap.py?txt
+  while also looking at the implementation in clang in:
+    https://llvm.org/svn/llvm-project/cfe/trunk/lib/Lex/HeaderMap.cpp
+  """
+    magic = 1751998832
+    version = 1
+    _reserved = 0
+    count = len(filelist)
+    capacity = NextGreaterPowerOf2(count)
+    strings_offset = 24 + (12 * capacity)
+    max_value_length = max(len(value) for value in filelist.values())
+
+    out = open(output_name, "wb")
+    out.write(
+        struct.pack(
+            "<LHHLLLL",
+            magic,
+            version,
+            _reserved,
+            strings_offset,
+            count,
+            capacity,
+            max_value_length,
+        )
+    )
+
+    # Create empty hashmap buckets.
+    buckets = [None] * capacity
+    for file, path in filelist.items():
+        key = 0
+        for c in file:
+            key += ord(c.lower()) * 13
+
+        # Fill next empty bucket.
+        while buckets[key & capacity - 1] is not None:
+            key = key + 1
+        buckets[key & capacity - 1] = (file, path)
+
+    next_offset = 1
+    for bucket in buckets:
+        if bucket is None:
+            out.write(struct.pack("<LLL", 0, 0, 0))
+        else:
+            (file, path) = bucket
+            key_offset = next_offset
+            prefix_offset = key_offset + len(file) + 1
+            suffix_offset = prefix_offset + len(os.path.dirname(path) + os.sep) + 1
+            next_offset = suffix_offset + len(os.path.basename(path)) + 1
+            out.write(struct.pack("<LLL", key_offset, prefix_offset, suffix_offset))
+
+    # Pad byte since next offset starts at 1.
+    out.write(struct.pack("<x"))
+
+    for bucket in buckets:
+        if bucket is not None:
+            (file, path) = bucket
+            out.write(struct.pack("<%ds" % len(file), file))
+            out.write(struct.pack("<s", "\0"))
+            base = os.path.dirname(path) + os.sep
+            out.write(struct.pack("<%ds" % len(base), base))
+            out.write(struct.pack("<s", "\0"))
+            path = os.path.basename(path)
+            out.write(struct.pack("<%ds" % len(path), path))
+            out.write(struct.pack("<s", "\0"))
+
+
+if __name__ == "__main__":
+    sys.exit(main(sys.argv[1:]))
diff --git a/tree-sitter/dsk/test-build/generated/js/build/tree_sitter_test_lang_binding.target.mk b/tree-sitter/dsk/test-build/generated/js/build/tree_sitter_test_lang_binding.target.mk
new file mode 100644
index 0000000..384e3d3
--- /dev/null
+++ b/tree-sitter/dsk/test-build/generated/js/build/tree_sitter_test_lang_binding.target.mk
@@ -0,0 +1,206 @@
+# This file is generated by gyp; do not edit.
+
+TOOLSET := target
+TARGET := tree_sitter_test_lang_binding
+DEFS_Debug := \
+	'-DNODE_GYP_MODULE_NAME=tree_sitter_test_lang_binding' \
+	'-DUSING_UV_SHARED=1' \
+	'-DUSING_V8_SHARED=1' \
+	'-DV8_DEPRECATION_WARNINGS=1' \
+	'-D_GLIBCXX_USE_CXX11_ABI=1' \
+	'-D_DARWIN_USE_64_BIT_INODE=1' \
+	'-D_LARGEFILE_SOURCE' \
+	'-D_FILE_OFFSET_BITS=64' \
+	'-DOPENSSL_NO_PINSHARED' \
+	'-DOPENSSL_THREADS' \
+	'-DNAPI_DISABLE_CPP_EXCEPTIONS' \
+	'-DBUILDING_NODE_EXTENSION' \
+	'-DDEBUG' \
+	'-D_DEBUG'
+
+# Flags passed to all source files.
+CFLAGS_Debug := \
+	-O0 \
+	-gdwarf-2 \
+	-mmacosx-version-min=10.15 \
+	-arch \
+	arm64 \
+	-Wall \
+	-Wendif-labels \
+	-W \
+	-Wno-unused-parameter
+
+# Flags passed to only C files.
+CFLAGS_C_Debug := \
+	-fno-strict-aliasing
+
+# Flags passed to only C++ files.
+CFLAGS_CC_Debug := \
+	-std=gnu++17 \
+	-stdlib=libc++ \
+	-fno-rtti \
+	-fno-exceptions \
+	-fno-strict-aliasing
+
+# Flags passed to only ObjC files.
+CFLAGS_OBJC_Debug :=
+
+# Flags passed to only ObjC++ files.
+CFLAGS_OBJCC_Debug :=
+
+INCS_Debug := \
+	-I/Users/eli/Library/Caches/node-gyp/20.13.1/include/node \
+	-I/Users/eli/Library/Caches/node-gyp/20.13.1/src \
+	-I/Users/eli/Library/Caches/node-gyp/20.13.1/deps/openssl/config \
+	-I/Users/eli/Library/Caches/node-gyp/20.13.1/deps/openssl/openssl/include \
+	-I/Users/eli/Library/Caches/node-gyp/20.13.1/deps/uv/include \
+	-I/Users/eli/Library/Caches/node-gyp/20.13.1/deps/zlib \
+	-I/Users/eli/Library/Caches/node-gyp/20.13.1/deps/v8/include \
+	"-I$(srcdir)/\"/Users/eli/Code/institute/tour/tree-sitter/dsk/test-build/generated/js/node_modules/node-addon-api\"" \
+	-I$(srcdir)/src
+
+DEFS_Release := \
+	'-DNODE_GYP_MODULE_NAME=tree_sitter_test_lang_binding' \
+	'-DUSING_UV_SHARED=1' \
+	'-DUSING_V8_SHARED=1' \
+	'-DV8_DEPRECATION_WARNINGS=1' \
+	'-D_GLIBCXX_USE_CXX11_ABI=1' \
+	'-D_DARWIN_USE_64_BIT_INODE=1' \
+	'-D_LARGEFILE_SOURCE' \
+	'-D_FILE_OFFSET_BITS=64' \
+	'-DOPENSSL_NO_PINSHARED' \
+	'-DOPENSSL_THREADS' \
+	'-DNAPI_DISABLE_CPP_EXCEPTIONS' \
+	'-DBUILDING_NODE_EXTENSION'
+
+# Flags passed to all source files.
+CFLAGS_Release := \
+	-O3 \
+	-gdwarf-2 \
+	-mmacosx-version-min=10.15 \
+	-arch \
+	arm64 \
+	-Wall \
+	-Wendif-labels \
+	-W \
+	-Wno-unused-parameter
+
+# Flags passed to only C files.
+CFLAGS_C_Release := \
+	-fno-strict-aliasing
+
+# Flags passed to only C++ files.
+CFLAGS_CC_Release := \
+	-std=gnu++17 \
+	-stdlib=libc++ \
+	-fno-rtti \
+	-fno-exceptions \
+	-fno-strict-aliasing
+
+# Flags passed to only ObjC files.
+CFLAGS_OBJC_Release :=
+
+# Flags passed to only ObjC++ files.
+CFLAGS_OBJCC_Release :=
+
+INCS_Release := \
+	-I/Users/eli/Library/Caches/node-gyp/20.13.1/include/node \
+	-I/Users/eli/Library/Caches/node-gyp/20.13.1/src \
+	-I/Users/eli/Library/Caches/node-gyp/20.13.1/deps/openssl/config \
+	-I/Users/eli/Library/Caches/node-gyp/20.13.1/deps/openssl/openssl/include \
+	-I/Users/eli/Library/Caches/node-gyp/20.13.1/deps/uv/include \
+	-I/Users/eli/Library/Caches/node-gyp/20.13.1/deps/zlib \
+	-I/Users/eli/Library/Caches/node-gyp/20.13.1/deps/v8/include \
+	"-I$(srcdir)/\"/Users/eli/Code/institute/tour/tree-sitter/dsk/test-build/generated/js/node_modules/node-addon-api\"" \
+	-I$(srcdir)/src
+
+OBJS := \
+	$(obj).target/$(TARGET)/bindings/node.o \
+	$(obj).target/$(TARGET)/src/parser.o
+
+# Add to the list of files we specially track dependencies for.
+all_deps += $(OBJS)
+
+# Make sure our dependencies are built before any of us.
+$(OBJS): | $(builddir)/nothing.a
+
+# CFLAGS et al overrides must be target-local.
+# See "Target-specific Variable Values" in the GNU Make manual.
+$(OBJS): TOOLSET := $(TOOLSET)
+$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE))  $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE))
+$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE))  $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE))
+$(OBJS): GYP_OBJCFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE))  $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE))
+$(OBJS): GYP_OBJCXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE))  $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE))
+
+# Suffix rules, putting all outputs into $(obj).
+
+$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.c FORCE_DO_CMD
+	@$(call do_cmd,cc,1)
+
+$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD
+	@$(call do_cmd,cxx,1)
+
+# Try building from generated source, too.
+
+$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD
+	@$(call do_cmd,cc,1)
+
+$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD
+	@$(call do_cmd,cxx,1)
+
+$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.c FORCE_DO_CMD
+	@$(call do_cmd,cc,1)
+
+$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.cc FORCE_DO_CMD
+	@$(call do_cmd,cxx,1)
+
+# End of this set of suffix rules
+### Rules for final target.
+LDFLAGS_Debug := \
+	-undefined dynamic_lookup \
+	-Wl,-search_paths_first \
+	-mmacosx-version-min=10.15 \
+	-arch \
+	arm64 \
+	-L$(builddir) \
+	-stdlib=libc++
+
+LIBTOOLFLAGS_Debug := \
+	-undefined dynamic_lookup \
+	-Wl,-search_paths_first
+
+LDFLAGS_Release := \
+	-undefined dynamic_lookup \
+	-Wl,-search_paths_first \
+	-mmacosx-version-min=10.15 \
+	-arch \
+	arm64 \
+	-L$(builddir) \
+	-stdlib=libc++
+
+LIBTOOLFLAGS_Release := \
+	-undefined dynamic_lookup \
+	-Wl,-search_paths_first
+
+LIBS :=
+
+$(builddir)/tree_sitter_test_lang_binding.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE))
+$(builddir)/tree_sitter_test_lang_binding.node: LIBS := $(LIBS)
+$(builddir)/tree_sitter_test_lang_binding.node: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE))
+$(builddir)/tree_sitter_test_lang_binding.node: TOOLSET := $(TOOLSET)
+$(builddir)/tree_sitter_test_lang_binding.node: $(OBJS) $(builddir)/nothing.a FORCE_DO_CMD
+	$(call do_cmd,solink_module)
+
+all_deps += $(builddir)/tree_sitter_test_lang_binding.node
+# Add target alias
+.PHONY: tree_sitter_test_lang_binding
+tree_sitter_test_lang_binding: $(builddir)/tree_sitter_test_lang_binding.node
+
+# Short alias for building this executable.
+.PHONY: tree_sitter_test_lang_binding.node
+tree_sitter_test_lang_binding.node: $(builddir)/tree_sitter_test_lang_binding.node
+
+# Add executable to "all" target.
+.PHONY: all
+all: $(builddir)/tree_sitter_test_lang_binding.node
+
diff --git a/tree-sitter/dsk/test-build/generated/js/bun.lockb b/tree-sitter/dsk/test-build/generated/js/bun.lockb
new file mode 100755
index 0000000..6878a7c
--- /dev/null
+++ b/tree-sitter/dsk/test-build/generated/js/bun.lockb
Binary files differdiff --git a/tree-sitter/dsk/test-build/generated/js/index.d.ts b/tree-sitter/dsk/test-build/generated/js/index.d.ts
new file mode 100644
index 0000000..d25eae0
--- /dev/null
+++ b/tree-sitter/dsk/test-build/generated/js/index.d.ts
@@ -0,0 +1,3 @@
+declare const _exports: any;
+export = _exports;
+//# sourceMappingURL=index.d.ts.map
\ No newline at end of file
diff --git a/tree-sitter/dsk/test-build/generated/js/index.d.ts.map b/tree-sitter/dsk/test-build/generated/js/index.d.ts.map
new file mode 100644
index 0000000..ca7a93a
--- /dev/null
+++ b/tree-sitter/dsk/test-build/generated/js/index.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["index.js"],"names":[],"mappings":""}
\ No newline at end of file
diff --git a/tree-sitter/dsk/test-build/generated/js/index.js b/tree-sitter/dsk/test-build/generated/js/index.js
new file mode 100644
index 0000000..af85b7a
--- /dev/null
+++ b/tree-sitter/dsk/test-build/generated/js/index.js
@@ -0,0 +1,15 @@
+try {
+  module.exports = require("./build/Release/tree_sitter_test_lang_binding");
+} catch (error1) {
+  if (error1.code !== 'MODULE_NOT_FOUND') {
+    throw error1;
+  }
+  try {
+    module.exports = require("./build/Debug/tree_sitter_test_lang_binding");
+  } catch (error2) {
+    if (error2.code !== 'MODULE_NOT_FOUND') {
+      throw error2;
+    }
+    throw error1
+  }
+}
diff --git a/tree-sitter/dsk/test-build/generated/js/package.json b/tree-sitter/dsk/test-build/generated/js/package.json
new file mode 100644
index 0000000..2005ec6
--- /dev/null
+++ b/tree-sitter/dsk/test-build/generated/js/package.json
@@ -0,0 +1,30 @@
+{
+  "name": "tree-sitter-test_lang",
+  "version": "1.0.0",
+  "description": "Tree-sitter parser for test_lang",
+  "main": "index.js",
+  "keywords": [
+    "tree-sitter",
+    "parser",
+    "test_lang"
+  ],
+  "author": "Generated by DSK",
+  "license": "MIT",
+  "dependencies": {
+    "node-addon-api": "^7.0.0"
+  },
+  "devDependencies": {
+    "tree-sitter-cli": "^0.20.0"
+  },
+  "scripts": {
+    "install": "node-gyp rebuild",
+    "test": "tree-sitter test"
+  },
+  "gypfile": true,
+  "files": [
+    "grammar.js",
+    "src",
+    "index.js",
+    "binding.gyp"
+  ]
+}
\ No newline at end of file
diff --git a/tree-sitter/dsk/test-build/generated/js/src/grammar.json b/tree-sitter/dsk/test-build/generated/js/src/grammar.json
new file mode 100644
index 0000000..0d709e9
--- /dev/null
+++ b/tree-sitter/dsk/test-build/generated/js/src/grammar.json
@@ -0,0 +1,115 @@
+{
+  "$schema": "https://tree-sitter.github.io/tree-sitter/assets/schemas/grammar.schema.json",
+  "name": "test_lang",
+  "word": "identifier",
+  "rules": {
+    "source_file": {
+      "type": "REPEAT",
+      "content": {
+        "type": "CHOICE",
+        "members": [
+          {
+            "type": "SYMBOL",
+            "name": "statement"
+          },
+          {
+            "type": "SYMBOL",
+            "name": "expression"
+          }
+        ]
+      }
+    },
+    "identifier": {
+      "type": "PATTERN",
+      "value": "[a-zA-Z_][a-zA-Z0-9_]*"
+    },
+    "number": {
+      "type": "PATTERN",
+      "value": "\\d+"
+    },
+    "string": {
+      "type": "PATTERN",
+      "value": "\"[^\"]*\""
+    },
+    "expression": {
+      "type": "CHOICE",
+      "members": [
+        {
+          "type": "SYMBOL",
+          "name": "identifier"
+        },
+        {
+          "type": "SYMBOL",
+          "name": "number"
+        },
+        {
+          "type": "SYMBOL",
+          "name": "string"
+        }
+      ]
+    },
+    "statement": {
+      "type": "CHOICE",
+      "members": [
+        {
+          "type": "SYMBOL",
+          "name": "variable_declaration"
+        }
+      ]
+    },
+    "variable_declaration": {
+      "type": "SEQ",
+      "members": [
+        {
+          "type": "STRING",
+          "value": "let"
+        },
+        {
+          "type": "SYMBOL",
+          "name": "identifier"
+        },
+        {
+          "type": "STRING",
+          "value": "="
+        },
+        {
+          "type": "SYMBOL",
+          "name": "expression"
+        },
+        {
+          "type": "STRING",
+          "value": ";"
+        }
+      ]
+    },
+    "line_comment": {
+      "type": "SEQ",
+      "members": [
+        {
+          "type": "STRING",
+          "value": "//"
+        },
+        {
+          "type": "PATTERN",
+          "value": "[^\\n]*"
+        }
+      ]
+    }
+  },
+  "extras": [
+    {
+      "type": "PATTERN",
+      "value": "\\s"
+    },
+    {
+      "type": "SYMBOL",
+      "name": "line_comment"
+    }
+  ],
+  "conflicts": [],
+  "precedences": [],
+  "externals": [],
+  "inline": [],
+  "supertypes": [],
+  "reserved": {}
+}
\ No newline at end of file
diff --git a/tree-sitter/dsk/test-build/generated/js/src/node-types.json b/tree-sitter/dsk/test-build/generated/js/src/node-types.json
new file mode 100644
index 0000000..a21a153
--- /dev/null
+++ b/tree-sitter/dsk/test-build/generated/js/src/node-types.json
@@ -0,0 +1,112 @@
+[
+  {
+    "type": "expression",
+    "named": true,
+    "fields": {},
+    "children": {
+      "multiple": false,
+      "required": true,
+      "types": [
+        {
+          "type": "identifier",
+          "named": true
+        },
+        {
+          "type": "number",
+          "named": true
+        },
+        {
+          "type": "string",
+          "named": true
+        }
+      ]
+    }
+  },
+  {
+    "type": "line_comment",
+    "named": true,
+    "fields": {}
+  },
+  {
+    "type": "source_file",
+    "named": true,
+    "root": true,
+    "fields": {},
+    "children": {
+      "multiple": true,
+      "required": false,
+      "types": [
+        {
+          "type": "expression",
+          "named": true
+        },
+        {
+          "type": "statement",
+          "named": true
+        }
+      ]
+    }
+  },
+  {
+    "type": "statement",
+    "named": true,
+    "fields": {},
+    "children": {
+      "multiple": false,
+      "required": true,
+      "types": [
+        {
+          "type": "variable_declaration",
+          "named": true
+        }
+      ]
+    }
+  },
+  {
+    "type": "variable_declaration",
+    "named": true,
+    "fields": {},
+    "children": {
+      "multiple": true,
+      "required": true,
+      "types": [
+        {
+          "type": "expression",
+          "named": true
+        },
+        {
+          "type": "identifier",
+          "named": true
+        }
+      ]
+    }
+  },
+  {
+    "type": "//",
+    "named": false
+  },
+  {
+    "type": ";",
+    "named": false
+  },
+  {
+    "type": "=",
+    "named": false
+  },
+  {
+    "type": "identifier",
+    "named": true
+  },
+  {
+    "type": "let",
+    "named": false
+  },
+  {
+    "type": "number",
+    "named": true
+  },
+  {
+    "type": "string",
+    "named": true
+  }
+]
\ No newline at end of file
diff --git a/tree-sitter/dsk/test-build/generated/js/src/parser.c b/tree-sitter/dsk/test-build/generated/js/src/parser.c
new file mode 100644
index 0000000..2191da5
--- /dev/null
+++ b/tree-sitter/dsk/test-build/generated/js/src/parser.c
@@ -0,0 +1,522 @@
+/* Automatically @generated by tree-sitter v0.25.6 */
+
+#include "tree_sitter/parser.h"
+
+#if defined(__GNUC__) || defined(__clang__)
+#pragma GCC diagnostic ignored "-Wmissing-field-initializers"
+#endif
+
+#define LANGUAGE_VERSION 14
+#define STATE_COUNT 15
+#define LARGE_STATE_COUNT 5
+#define SYMBOL_COUNT 15
+#define ALIAS_COUNT 0
+#define TOKEN_COUNT 9
+#define EXTERNAL_TOKEN_COUNT 0
+#define FIELD_COUNT 0
+#define MAX_ALIAS_SEQUENCE_LENGTH 5
+#define MAX_RESERVED_WORD_SET_SIZE 0
+#define PRODUCTION_ID_COUNT 1
+#define SUPERTYPE_COUNT 0
+
+enum ts_symbol_identifiers {
+  sym_identifier = 1,
+  sym_number = 2,
+  sym_string = 3,
+  anon_sym_let = 4,
+  anon_sym_EQ = 5,
+  anon_sym_SEMI = 6,
+  anon_sym_SLASH_SLASH = 7,
+  aux_sym_line_comment_token1 = 8,
+  sym_source_file = 9,
+  sym_expression = 10,
+  sym_statement = 11,
+  sym_variable_declaration = 12,
+  sym_line_comment = 13,
+  aux_sym_source_file_repeat1 = 14,
+};
+
+static const char * const ts_symbol_names[] = {
+  [ts_builtin_sym_end] = "end",
+  [sym_identifier] = "identifier",
+  [sym_number] = "number",
+  [sym_string] = "string",
+  [anon_sym_let] = "let",
+  [anon_sym_EQ] = "=",
+  [anon_sym_SEMI] = ";",
+  [anon_sym_SLASH_SLASH] = "//",
+  [aux_sym_line_comment_token1] = "line_comment_token1",
+  [sym_source_file] = "source_file",
+  [sym_expression] = "expression",
+  [sym_statement] = "statement",
+  [sym_variable_declaration] = "variable_declaration",
+  [sym_line_comment] = "line_comment",
+  [aux_sym_source_file_repeat1] = "source_file_repeat1",
+};
+
+static const TSSymbol ts_symbol_map[] = {
+  [ts_builtin_sym_end] = ts_builtin_sym_end,
+  [sym_identifier] = sym_identifier,
+  [sym_number] = sym_number,
+  [sym_string] = sym_string,
+  [anon_sym_let] = anon_sym_let,
+  [anon_sym_EQ] = anon_sym_EQ,
+  [anon_sym_SEMI] = anon_sym_SEMI,
+  [anon_sym_SLASH_SLASH] = anon_sym_SLASH_SLASH,
+  [aux_sym_line_comment_token1] = aux_sym_line_comment_token1,
+  [sym_source_file] = sym_source_file,
+  [sym_expression] = sym_expression,
+  [sym_statement] = sym_statement,
+  [sym_variable_declaration] = sym_variable_declaration,
+  [sym_line_comment] = sym_line_comment,
+  [aux_sym_source_file_repeat1] = aux_sym_source_file_repeat1,
+};
+
+static const TSSymbolMetadata ts_symbol_metadata[] = {
+  [ts_builtin_sym_end] = {
+    .visible = false,
+    .named = true,
+  },
+  [sym_identifier] = {
+    .visible = true,
+    .named = true,
+  },
+  [sym_number] = {
+    .visible = true,
+    .named = true,
+  },
+  [sym_string] = {
+    .visible = true,
+    .named = true,
+  },
+  [anon_sym_let] = {
+    .visible = true,
+    .named = false,
+  },
+  [anon_sym_EQ] = {
+    .visible = true,
+    .named = false,
+  },
+  [anon_sym_SEMI] = {
+    .visible = true,
+    .named = false,
+  },
+  [anon_sym_SLASH_SLASH] = {
+    .visible = true,
+    .named = false,
+  },
+  [aux_sym_line_comment_token1] = {
+    .visible = false,
+    .named = false,
+  },
+  [sym_source_file] = {
+    .visible = true,
+    .named = true,
+  },
+  [sym_expression] = {
+    .visible = true,
+    .named = true,
+  },
+  [sym_statement] = {
+    .visible = true,
+    .named = true,
+  },
+  [sym_variable_declaration] = {
+    .visible = true,
+    .named = true,
+  },
+  [sym_line_comment] = {
+    .visible = true,
+    .named = true,
+  },
+  [aux_sym_source_file_repeat1] = {
+    .visible = false,
+    .named = false,
+  },
+};
+
+static const TSSymbol ts_alias_sequences[PRODUCTION_ID_COUNT][MAX_ALIAS_SEQUENCE_LENGTH] = {
+  [0] = {0},
+};
+
+static const uint16_t ts_non_terminal_alias_map[] = {
+  0,
+};
+
+static const TSStateId ts_primary_state_ids[STATE_COUNT] = {
+  [0] = 0,
+  [1] = 1,
+  [2] = 2,
+  [3] = 3,
+  [4] = 4,
+  [5] = 5,
+  [6] = 6,
+  [7] = 7,
+  [8] = 8,
+  [9] = 9,
+  [10] = 10,
+  [11] = 11,
+  [12] = 12,
+  [13] = 13,
+  [14] = 14,
+};
+
+static bool ts_lex(TSLexer *lexer, TSStateId state) {
+  START_LEXER();
+  eof = lexer->eof(lexer);
+  switch (state) {
+    case 0:
+      if (eof) ADVANCE(3);
+      if (lookahead == '"') ADVANCE(1);
+      if (lookahead == '/') ADVANCE(2);
+      if (lookahead == ';') ADVANCE(8);
+      if (lookahead == '=') ADVANCE(7);
+      if (('\t' <= lookahead && lookahead <= '\r') ||
+          lookahead == ' ') SKIP(0);
+      if (('0' <= lookahead && lookahead <= '9')) ADVANCE(5);
+      if (('A' <= lookahead && lookahead <= 'Z') ||
+          lookahead == '_' ||
+          ('a' <= lookahead && lookahead <= 'z')) ADVANCE(4);
+      END_STATE();
+    case 1:
+      if (lookahead == '"') ADVANCE(6);
+      if (lookahead != 0) ADVANCE(1);
+      END_STATE();
+    case 2:
+      if (lookahead == '/') ADVANCE(9);
+      END_STATE();
+    case 3:
+      ACCEPT_TOKEN(ts_builtin_sym_end);
+      END_STATE();
+    case 4:
+      ACCEPT_TOKEN(sym_identifier);
+      if (('0' <= lookahead && lookahead <= '9') ||
+          ('A' <= lookahead && lookahead <= 'Z') ||
+          lookahead == '_' ||
+          ('a' <= lookahead && lookahead <= 'z')) ADVANCE(4);
+      END_STATE();
+    case 5:
+      ACCEPT_TOKEN(sym_number);
+      if (('0' <= lookahead && lookahead <= '9')) ADVANCE(5);
+      END_STATE();
+    case 6:
+      ACCEPT_TOKEN(sym_string);
+      END_STATE();
+    case 7:
+      ACCEPT_TOKEN(anon_sym_EQ);
+      END_STATE();
+    case 8:
+      ACCEPT_TOKEN(anon_sym_SEMI);
+      END_STATE();
+    case 9:
+      ACCEPT_TOKEN(anon_sym_SLASH_SLASH);
+      END_STATE();
+    case 10:
+      ACCEPT_TOKEN(anon_sym_SLASH_SLASH);
+      if (lookahead != 0 &&
+          lookahead != '\n') ADVANCE(13);
+      END_STATE();
+    case 11:
+      ACCEPT_TOKEN(aux_sym_line_comment_token1);
+      if (lookahead == '/') ADVANCE(12);
+      if (lookahead == '\t' ||
+          (0x0b <= lookahead && lookahead <= '\r') ||
+          lookahead == ' ') ADVANCE(11);
+      if (lookahead != 0 &&
+          (lookahead < '\t' || '\r' < lookahead)) ADVANCE(13);
+      END_STATE();
+    case 12:
+      ACCEPT_TOKEN(aux_sym_line_comment_token1);
+      if (lookahead == '/') ADVANCE(10);
+      if (lookahead != 0 &&
+          lookahead != '\n') ADVANCE(13);
+      END_STATE();
+    case 13:
+      ACCEPT_TOKEN(aux_sym_line_comment_token1);
+      if (lookahead != 0 &&
+          lookahead != '\n') ADVANCE(13);
+      END_STATE();
+    default:
+      return false;
+  }
+}
+
+static bool ts_lex_keywords(TSLexer *lexer, TSStateId state) {
+  START_LEXER();
+  eof = lexer->eof(lexer);
+  switch (state) {
+    case 0:
+      if (lookahead == 'l') ADVANCE(1);
+      if (('\t' <= lookahead && lookahead <= '\r') ||
+          lookahead == ' ') SKIP(0);
+      END_STATE();
+    case 1:
+      if (lookahead == 'e') ADVANCE(2);
+      END_STATE();
+    case 2:
+      if (lookahead == 't') ADVANCE(3);
+      END_STATE();
+    case 3:
+      ACCEPT_TOKEN(anon_sym_let);
+      END_STATE();
+    default:
+      return false;
+  }
+}
+
+static const TSLexMode ts_lex_modes[STATE_COUNT] = {
+  [0] = {.lex_state = 0},
+  [1] = {.lex_state = 0},
+  [2] = {.lex_state = 0},
+  [3] = {.lex_state = 0},
+  [4] = {.lex_state = 0},
+  [5] = {.lex_state = 0},
+  [6] = {.lex_state = 0},
+  [7] = {.lex_state = 0},
+  [8] = {.lex_state = 0},
+  [9] = {.lex_state = 11},
+  [10] = {.lex_state = 0},
+  [11] = {.lex_state = 0},
+  [12] = {.lex_state = 0},
+  [13] = {.lex_state = 0},
+  [14] = {(TSStateId)(-1),},
+};
+
+static const uint16_t ts_parse_table[LARGE_STATE_COUNT][SYMBOL_COUNT] = {
+  [STATE(0)] = {
+    [sym_line_comment] = STATE(0),
+    [ts_builtin_sym_end] = ACTIONS(1),
+    [sym_identifier] = ACTIONS(1),
+    [sym_number] = ACTIONS(1),
+    [sym_string] = ACTIONS(1),
+    [anon_sym_let] = ACTIONS(1),
+    [anon_sym_EQ] = ACTIONS(1),
+    [anon_sym_SEMI] = ACTIONS(1),
+    [anon_sym_SLASH_SLASH] = ACTIONS(3),
+  },
+  [STATE(1)] = {
+    [sym_source_file] = STATE(11),
+    [sym_expression] = STATE(5),
+    [sym_statement] = STATE(5),
+    [sym_variable_declaration] = STATE(6),
+    [sym_line_comment] = STATE(1),
+    [aux_sym_source_file_repeat1] = STATE(2),
+    [ts_builtin_sym_end] = ACTIONS(5),
+    [sym_identifier] = ACTIONS(7),
+    [sym_number] = ACTIONS(9),
+    [sym_string] = ACTIONS(9),
+    [anon_sym_let] = ACTIONS(11),
+    [anon_sym_SLASH_SLASH] = ACTIONS(3),
+  },
+  [STATE(2)] = {
+    [sym_expression] = STATE(5),
+    [sym_statement] = STATE(5),
+    [sym_variable_declaration] = STATE(6),
+    [sym_line_comment] = STATE(2),
+    [aux_sym_source_file_repeat1] = STATE(3),
+    [ts_builtin_sym_end] = ACTIONS(13),
+    [sym_identifier] = ACTIONS(7),
+    [sym_number] = ACTIONS(9),
+    [sym_string] = ACTIONS(9),
+    [anon_sym_let] = ACTIONS(11),
+    [anon_sym_SLASH_SLASH] = ACTIONS(3),
+  },
+  [STATE(3)] = {
+    [sym_expression] = STATE(5),
+    [sym_statement] = STATE(5),
+    [sym_variable_declaration] = STATE(6),
+    [sym_line_comment] = STATE(3),
+    [aux_sym_source_file_repeat1] = STATE(3),
+    [ts_builtin_sym_end] = ACTIONS(15),
+    [sym_identifier] = ACTIONS(17),
+    [sym_number] = ACTIONS(20),
+    [sym_string] = ACTIONS(20),
+    [anon_sym_let] = ACTIONS(23),
+    [anon_sym_SLASH_SLASH] = ACTIONS(3),
+  },
+  [STATE(4)] = {
+    [sym_line_comment] = STATE(4),
+    [ts_builtin_sym_end] = ACTIONS(26),
+    [sym_identifier] = ACTIONS(28),
+    [sym_number] = ACTIONS(26),
+    [sym_string] = ACTIONS(26),
+    [anon_sym_let] = ACTIONS(28),
+    [anon_sym_SEMI] = ACTIONS(26),
+    [anon_sym_SLASH_SLASH] = ACTIONS(3),
+  },
+};
+
+static const uint16_t ts_small_parse_table[] = {
+  [0] = 4,
+    ACTIONS(3), 1,
+      anon_sym_SLASH_SLASH,
+    STATE(5), 1,
+      sym_line_comment,
+    ACTIONS(32), 2,
+      sym_identifier,
+      anon_sym_let,
+    ACTIONS(30), 3,
+      ts_builtin_sym_end,
+      sym_number,
+      sym_string,
+  [16] = 4,
+    ACTIONS(3), 1,
+      anon_sym_SLASH_SLASH,
+    STATE(6), 1,
+      sym_line_comment,
+    ACTIONS(36), 2,
+      sym_identifier,
+      anon_sym_let,
+    ACTIONS(34), 3,
+      ts_builtin_sym_end,
+      sym_number,
+      sym_string,
+  [32] = 4,
+    ACTIONS(3), 1,
+      anon_sym_SLASH_SLASH,
+    STATE(7), 1,
+      sym_line_comment,
+    ACTIONS(40), 2,
+      sym_identifier,
+      anon_sym_let,
+    ACTIONS(38), 3,
+      ts_builtin_sym_end,
+      sym_number,
+      sym_string,
+  [48] = 4,
+    ACTIONS(3), 1,
+      anon_sym_SLASH_SLASH,
+    STATE(8), 1,
+      sym_line_comment,
+    STATE(13), 1,
+      sym_expression,
+    ACTIONS(9), 3,
+      sym_identifier,
+      sym_number,
+      sym_string,
+  [63] = 3,
+    ACTIONS(42), 1,
+      anon_sym_SLASH_SLASH,
+    ACTIONS(44), 1,
+      aux_sym_line_comment_token1,
+    STATE(9), 1,
+      sym_line_comment,
+  [73] = 3,
+    ACTIONS(3), 1,
+      anon_sym_SLASH_SLASH,
+    ACTIONS(46), 1,
+      sym_identifier,
+    STATE(10), 1,
+      sym_line_comment,
+  [83] = 3,
+    ACTIONS(3), 1,
+      anon_sym_SLASH_SLASH,
+    ACTIONS(48), 1,
+      ts_builtin_sym_end,
+    STATE(11), 1,
+      sym_line_comment,
+  [93] = 3,
+    ACTIONS(3), 1,
+      anon_sym_SLASH_SLASH,
+    ACTIONS(50), 1,
+      anon_sym_EQ,
+    STATE(12), 1,
+      sym_line_comment,
+  [103] = 3,
+    ACTIONS(3), 1,
+      anon_sym_SLASH_SLASH,
+    ACTIONS(52), 1,
+      anon_sym_SEMI,
+    STATE(13), 1,
+      sym_line_comment,
+  [113] = 1,
+    ACTIONS(54), 1,
+      ts_builtin_sym_end,
+};
+
+static const uint32_t ts_small_parse_table_map[] = {
+  [SMALL_STATE(5)] = 0,
+  [SMALL_STATE(6)] = 16,
+  [SMALL_STATE(7)] = 32,
+  [SMALL_STATE(8)] = 48,
+  [SMALL_STATE(9)] = 63,
+  [SMALL_STATE(10)] = 73,
+  [SMALL_STATE(11)] = 83,
+  [SMALL_STATE(12)] = 93,
+  [SMALL_STATE(13)] = 103,
+  [SMALL_STATE(14)] = 113,
+};
+
+static const TSParseActionEntry ts_parse_actions[] = {
+  [0] = {.entry = {.count = 0, .reusable = false}},
+  [1] = {.entry = {.count = 1, .reusable = false}}, RECOVER(),
+  [3] = {.entry = {.count = 1, .reusable = true}}, SHIFT(9),
+  [5] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_source_file, 0, 0, 0),
+  [7] = {.entry = {.count = 1, .reusable = false}}, SHIFT(4),
+  [9] = {.entry = {.count = 1, .reusable = true}}, SHIFT(4),
+  [11] = {.entry = {.count = 1, .reusable = false}}, SHIFT(10),
+  [13] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_source_file, 1, 0, 0),
+  [15] = {.entry = {.count = 1, .reusable = true}}, REDUCE(aux_sym_source_file_repeat1, 2, 0, 0),
+  [17] = {.entry = {.count = 2, .reusable = false}}, REDUCE(aux_sym_source_file_repeat1, 2, 0, 0), SHIFT_REPEAT(4),
+  [20] = {.entry = {.count = 2, .reusable = true}}, REDUCE(aux_sym_source_file_repeat1, 2, 0, 0), SHIFT_REPEAT(4),
+  [23] = {.entry = {.count = 2, .reusable = false}}, REDUCE(aux_sym_source_file_repeat1, 2, 0, 0), SHIFT_REPEAT(10),
+  [26] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_expression, 1, 0, 0),
+  [28] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_expression, 1, 0, 0),
+  [30] = {.entry = {.count = 1, .reusable = true}}, REDUCE(aux_sym_source_file_repeat1, 1, 0, 0),
+  [32] = {.entry = {.count = 1, .reusable = false}}, REDUCE(aux_sym_source_file_repeat1, 1, 0, 0),
+  [34] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_statement, 1, 0, 0),
+  [36] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_statement, 1, 0, 0),
+  [38] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_variable_declaration, 5, 0, 0),
+  [40] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_variable_declaration, 5, 0, 0),
+  [42] = {.entry = {.count = 1, .reusable = false}}, SHIFT(9),
+  [44] = {.entry = {.count = 1, .reusable = false}}, SHIFT(14),
+  [46] = {.entry = {.count = 1, .reusable = true}}, SHIFT(12),
+  [48] = {.entry = {.count = 1, .reusable = true}},  ACCEPT_INPUT(),
+  [50] = {.entry = {.count = 1, .reusable = true}}, SHIFT(8),
+  [52] = {.entry = {.count = 1, .reusable = true}}, SHIFT(7),
+  [54] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_line_comment, 2, 0, 0),
+};
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+#ifdef TREE_SITTER_HIDE_SYMBOLS
+#define TS_PUBLIC
+#elif defined(_WIN32)
+#define TS_PUBLIC __declspec(dllexport)
+#else
+#define TS_PUBLIC __attribute__((visibility("default")))
+#endif
+
+TS_PUBLIC const TSLanguage *tree_sitter_test_lang(void) {
+  static const TSLanguage language = {
+    .abi_version = LANGUAGE_VERSION,
+    .symbol_count = SYMBOL_COUNT,
+    .alias_count = ALIAS_COUNT,
+    .token_count = TOKEN_COUNT,
+    .external_token_count = EXTERNAL_TOKEN_COUNT,
+    .state_count = STATE_COUNT,
+    .large_state_count = LARGE_STATE_COUNT,
+    .production_id_count = PRODUCTION_ID_COUNT,
+    .field_count = FIELD_COUNT,
+    .max_alias_sequence_length = MAX_ALIAS_SEQUENCE_LENGTH,
+    .parse_table = &ts_parse_table[0][0],
+    .small_parse_table = ts_small_parse_table,
+    .small_parse_table_map = ts_small_parse_table_map,
+    .parse_actions = ts_parse_actions,
+    .symbol_names = ts_symbol_names,
+    .symbol_metadata = ts_symbol_metadata,
+    .public_symbol_map = ts_symbol_map,
+    .alias_map = ts_non_terminal_alias_map,
+    .alias_sequences = &ts_alias_sequences[0][0],
+    .lex_modes = (const void*)ts_lex_modes,
+    .lex_fn = ts_lex,
+    .keyword_lex_fn = ts_lex_keywords,
+    .keyword_capture_token = sym_identifier,
+    .primary_state_ids = ts_primary_state_ids,
+  };
+  return &language;
+}
+#ifdef __cplusplus
+}
+#endif
diff --git a/tree-sitter/dsk/test-build/generated/js/src/tree_sitter/alloc.h b/tree-sitter/dsk/test-build/generated/js/src/tree_sitter/alloc.h
new file mode 100644
index 0000000..1abdd12
--- /dev/null
+++ b/tree-sitter/dsk/test-build/generated/js/src/tree_sitter/alloc.h
@@ -0,0 +1,54 @@
+#ifndef TREE_SITTER_ALLOC_H_
+#define TREE_SITTER_ALLOC_H_
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include <stdbool.h>
+#include <stdio.h>
+#include <stdlib.h>
+
+// Allow clients to override allocation functions
+#ifdef TREE_SITTER_REUSE_ALLOCATOR
+
+extern void *(*ts_current_malloc)(size_t size);
+extern void *(*ts_current_calloc)(size_t count, size_t size);
+extern void *(*ts_current_realloc)(void *ptr, size_t size);
+extern void (*ts_current_free)(void *ptr);
+
+#ifndef ts_malloc
+#define ts_malloc  ts_current_malloc
+#endif
+#ifndef ts_calloc
+#define ts_calloc  ts_current_calloc
+#endif
+#ifndef ts_realloc
+#define ts_realloc ts_current_realloc
+#endif
+#ifndef ts_free
+#define ts_free    ts_current_free
+#endif
+
+#else
+
+#ifndef ts_malloc
+#define ts_malloc  malloc
+#endif
+#ifndef ts_calloc
+#define ts_calloc  calloc
+#endif
+#ifndef ts_realloc
+#define ts_realloc realloc
+#endif
+#ifndef ts_free
+#define ts_free    free
+#endif
+
+#endif
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif // TREE_SITTER_ALLOC_H_
diff --git a/tree-sitter/dsk/test-build/generated/js/src/tree_sitter/array.h b/tree-sitter/dsk/test-build/generated/js/src/tree_sitter/array.h
new file mode 100644
index 0000000..a17a574
--- /dev/null
+++ b/tree-sitter/dsk/test-build/generated/js/src/tree_sitter/array.h
@@ -0,0 +1,291 @@
+#ifndef TREE_SITTER_ARRAY_H_
+#define TREE_SITTER_ARRAY_H_
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include "./alloc.h"
+
+#include <assert.h>
+#include <stdbool.h>
+#include <stdint.h>
+#include <stdlib.h>
+#include <string.h>
+
+#ifdef _MSC_VER
+#pragma warning(push)
+#pragma warning(disable : 4101)
+#elif defined(__GNUC__) || defined(__clang__)
+#pragma GCC diagnostic push
+#pragma GCC diagnostic ignored "-Wunused-variable"
+#endif
+
+#define Array(T)       \
+  struct {             \
+    T *contents;       \
+    uint32_t size;     \
+    uint32_t capacity; \
+  }
+
+/// Initialize an array.
+#define array_init(self) \
+  ((self)->size = 0, (self)->capacity = 0, (self)->contents = NULL)
+
+/// Create an empty array.
+#define array_new() \
+  { NULL, 0, 0 }
+
+/// Get a pointer to the element at a given `index` in the array.
+#define array_get(self, _index) \
+  (assert((uint32_t)(_index) < (self)->size), &(self)->contents[_index])
+
+/// Get a pointer to the first element in the array.
+#define array_front(self) array_get(self, 0)
+
+/// Get a pointer to the last element in the array.
+#define array_back(self) array_get(self, (self)->size - 1)
+
+/// Clear the array, setting its size to zero. Note that this does not free any
+/// memory allocated for the array's contents.
+#define array_clear(self) ((self)->size = 0)
+
+/// Reserve `new_capacity` elements of space in the array. If `new_capacity` is
+/// less than the array's current capacity, this function has no effect.
+#define array_reserve(self, new_capacity) \
+  _array__reserve((Array *)(self), array_elem_size(self), new_capacity)
+
+/// Free any memory allocated for this array. Note that this does not free any
+/// memory allocated for the array's contents.
+#define array_delete(self) _array__delete((Array *)(self))
+
+/// Push a new `element` onto the end of the array.
+#define array_push(self, element)                            \
+  (_array__grow((Array *)(self), 1, array_elem_size(self)), \
+   (self)->contents[(self)->size++] = (element))
+
+/// Increase the array's size by `count` elements.
+/// New elements are zero-initialized.
+#define array_grow_by(self, count) \
+  do { \
+    if ((count) == 0) break; \
+    _array__grow((Array *)(self), count, array_elem_size(self)); \
+    memset((self)->contents + (self)->size, 0, (count) * array_elem_size(self)); \
+    (self)->size += (count); \
+  } while (0)
+
+/// Append all elements from one array to the end of another.
+#define array_push_all(self, other)                                       \
+  array_extend((self), (other)->size, (other)->contents)
+
+/// Append `count` elements to the end of the array, reading their values from the
+/// `contents` pointer.
+#define array_extend(self, count, contents)                    \
+  _array__splice(                                               \
+    (Array *)(self), array_elem_size(self), (self)->size, \
+    0, count,  contents                                        \
+  )
+
+/// Remove `old_count` elements from the array starting at the given `index`. At
+/// the same index, insert `new_count` new elements, reading their values from the
+/// `new_contents` pointer.
+#define array_splice(self, _index, old_count, new_count, new_contents)  \
+  _array__splice(                                                       \
+    (Array *)(self), array_elem_size(self), _index,                \
+    old_count, new_count, new_contents                                 \
+  )
+
+/// Insert one `element` into the array at the given `index`.
+#define array_insert(self, _index, element) \
+  _array__splice((Array *)(self), array_elem_size(self), _index, 0, 1, &(element))
+
+/// Remove one element from the array at the given `index`.
+#define array_erase(self, _index) \
+  _array__erase((Array *)(self), array_elem_size(self), _index)
+
+/// Pop the last element off the array, returning the element by value.
+#define array_pop(self) ((self)->contents[--(self)->size])
+
+/// Assign the contents of one array to another, reallocating if necessary.
+#define array_assign(self, other) \
+  _array__assign((Array *)(self), (const Array *)(other), array_elem_size(self))
+
+/// Swap one array with another
+#define array_swap(self, other) \
+  _array__swap((Array *)(self), (Array *)(other))
+
+/// Get the size of the array contents
+#define array_elem_size(self) (sizeof *(self)->contents)
+
+/// Search a sorted array for a given `needle` value, using the given `compare`
+/// callback to determine the order.
+///
+/// If an existing element is found to be equal to `needle`, then the `index`
+/// out-parameter is set to the existing value's index, and the `exists`
+/// out-parameter is set to true. Otherwise, `index` is set to an index where
+/// `needle` should be inserted in order to preserve the sorting, and `exists`
+/// is set to false.
+#define array_search_sorted_with(self, compare, needle, _index, _exists) \
+  _array__search_sorted(self, 0, compare, , needle, _index, _exists)
+
+/// Search a sorted array for a given `needle` value, using integer comparisons
+/// of a given struct field (specified with a leading dot) to determine the order.
+///
+/// See also `array_search_sorted_with`.
+#define array_search_sorted_by(self, field, needle, _index, _exists) \
+  _array__search_sorted(self, 0, _compare_int, field, needle, _index, _exists)
+
+/// Insert a given `value` into a sorted array, using the given `compare`
+/// callback to determine the order.
+#define array_insert_sorted_with(self, compare, value) \
+  do { \
+    unsigned _index, _exists; \
+    array_search_sorted_with(self, compare, &(value), &_index, &_exists); \
+    if (!_exists) array_insert(self, _index, value); \
+  } while (0)
+
+/// Insert a given `value` into a sorted array, using integer comparisons of
+/// a given struct field (specified with a leading dot) to determine the order.
+///
+/// See also `array_search_sorted_by`.
+#define array_insert_sorted_by(self, field, value) \
+  do { \
+    unsigned _index, _exists; \
+    array_search_sorted_by(self, field, (value) field, &_index, &_exists); \
+    if (!_exists) array_insert(self, _index, value); \
+  } while (0)
+
+// Private
+
+typedef Array(void) Array;
+
+/// This is not what you're looking for, see `array_delete`.
+static inline void _array__delete(Array *self) {
+  if (self->contents) {
+    ts_free(self->contents);
+    self->contents = NULL;
+    self->size = 0;
+    self->capacity = 0;
+  }
+}
+
+/// This is not what you're looking for, see `array_erase`.
+static inline void _array__erase(Array *self, size_t element_size,
+                                uint32_t index) {
+  assert(index < self->size);
+  char *contents = (char *)self->contents;
+  memmove(contents + index * element_size, contents + (index + 1) * element_size,
+          (self->size - index - 1) * element_size);
+  self->size--;
+}
+
+/// This is not what you're looking for, see `array_reserve`.
+static inline void _array__reserve(Array *self, size_t element_size, uint32_t new_capacity) {
+  if (new_capacity > self->capacity) {
+    if (self->contents) {
+      self->contents = ts_realloc(self->contents, new_capacity * element_size);
+    } else {
+      self->contents = ts_malloc(new_capacity * element_size);
+    }
+    self->capacity = new_capacity;
+  }
+}
+
+/// This is not what you're looking for, see `array_assign`.
+static inline void _array__assign(Array *self, const Array *other, size_t element_size) {
+  _array__reserve(self, element_size, other->size);
+  self->size = other->size;
+  memcpy(self->contents, other->contents, self->size * element_size);
+}
+
+/// This is not what you're looking for, see `array_swap`.
+static inline void _array__swap(Array *self, Array *other) {
+  Array swap = *other;
+  *other = *self;
+  *self = swap;
+}
+
+/// This is not what you're looking for, see `array_push` or `array_grow_by`.
+static inline void _array__grow(Array *self, uint32_t count, size_t element_size) {
+  uint32_t new_size = self->size + count;
+  if (new_size > self->capacity) {
+    uint32_t new_capacity = self->capacity * 2;
+    if (new_capacity < 8) new_capacity = 8;
+    if (new_capacity < new_size) new_capacity = new_size;
+    _array__reserve(self, element_size, new_capacity);
+  }
+}
+
+/// This is not what you're looking for, see `array_splice`.
+static inline void _array__splice(Array *self, size_t element_size,
+                                 uint32_t index, uint32_t old_count,
+                                 uint32_t new_count, const void *elements) {
+  uint32_t new_size = self->size + new_count - old_count;
+  uint32_t old_end = index + old_count;
+  uint32_t new_end = index + new_count;
+  assert(old_end <= self->size);
+
+  _array__reserve(self, element_size, new_size);
+
+  char *contents = (char *)self->contents;
+  if (self->size > old_end) {
+    memmove(
+      contents + new_end * element_size,
+      contents + old_end * element_size,
+      (self->size - old_end) * element_size
+    );
+  }
+  if (new_count > 0) {
+    if (elements) {
+      memcpy(
+        (contents + index * element_size),
+        elements,
+        new_count * element_size
+      );
+    } else {
+      memset(
+        (contents + index * element_size),
+        0,
+        new_count * element_size
+      );
+    }
+  }
+  self->size += new_count - old_count;
+}
+
+/// A binary search routine, based on Rust's `std::slice::binary_search_by`.
+/// This is not what you're looking for, see `array_search_sorted_with` or `array_search_sorted_by`.
+#define _array__search_sorted(self, start, compare, suffix, needle, _index, _exists) \
+  do { \
+    *(_index) = start; \
+    *(_exists) = false; \
+    uint32_t size = (self)->size - *(_index); \
+    if (size == 0) break; \
+    int comparison; \
+    while (size > 1) { \
+      uint32_t half_size = size / 2; \
+      uint32_t mid_index = *(_index) + half_size; \
+      comparison = compare(&((self)->contents[mid_index] suffix), (needle)); \
+      if (comparison <= 0) *(_index) = mid_index; \
+      size -= half_size; \
+    } \
+    comparison = compare(&((self)->contents[*(_index)] suffix), (needle)); \
+    if (comparison == 0) *(_exists) = true; \
+    else if (comparison < 0) *(_index) += 1; \
+  } while (0)
+
+/// Helper macro for the `_sorted_by` routines below. This takes the left (existing)
+/// parameter by reference in order to work with the generic sorting function above.
+#define _compare_int(a, b) ((int)*(a) - (int)(b))
+
+#ifdef _MSC_VER
+#pragma warning(pop)
+#elif defined(__GNUC__) || defined(__clang__)
+#pragma GCC diagnostic pop
+#endif
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif  // TREE_SITTER_ARRAY_H_
diff --git a/tree-sitter/dsk/test-build/generated/js/src/tree_sitter/parser.h b/tree-sitter/dsk/test-build/generated/js/src/tree_sitter/parser.h
new file mode 100644
index 0000000..858107d
--- /dev/null
+++ b/tree-sitter/dsk/test-build/generated/js/src/tree_sitter/parser.h
@@ -0,0 +1,286 @@
+#ifndef TREE_SITTER_PARSER_H_
+#define TREE_SITTER_PARSER_H_
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include <stdbool.h>
+#include <stdint.h>
+#include <stdlib.h>
+
+#define ts_builtin_sym_error ((TSSymbol)-1)
+#define ts_builtin_sym_end 0
+#define TREE_SITTER_SERIALIZATION_BUFFER_SIZE 1024
+
+#ifndef TREE_SITTER_API_H_
+typedef uint16_t TSStateId;
+typedef uint16_t TSSymbol;
+typedef uint16_t TSFieldId;
+typedef struct TSLanguage TSLanguage;
+typedef struct TSLanguageMetadata {
+  uint8_t major_version;
+  uint8_t minor_version;
+  uint8_t patch_version;
+} TSLanguageMetadata;
+#endif
+
+typedef struct {
+  TSFieldId field_id;
+  uint8_t child_index;
+  bool inherited;
+} TSFieldMapEntry;
+
+// Used to index the field and supertype maps.
+typedef struct {
+  uint16_t index;
+  uint16_t length;
+} TSMapSlice;
+
+typedef struct {
+  bool visible;
+  bool named;
+  bool supertype;
+} TSSymbolMetadata;
+
+typedef struct TSLexer TSLexer;
+
+struct TSLexer {
+  int32_t lookahead;
+  TSSymbol result_symbol;
+  void (*advance)(TSLexer *, bool);
+  void (*mark_end)(TSLexer *);
+  uint32_t (*get_column)(TSLexer *);
+  bool (*is_at_included_range_start)(const TSLexer *);
+  bool (*eof)(const TSLexer *);
+  void (*log)(const TSLexer *, const char *, ...);
+};
+
+typedef enum {
+  TSParseActionTypeShift,
+  TSParseActionTypeReduce,
+  TSParseActionTypeAccept,
+  TSParseActionTypeRecover,
+} TSParseActionType;
+
+typedef union {
+  struct {
+    uint8_t type;
+    TSStateId state;
+    bool extra;
+    bool repetition;
+  } shift;
+  struct {
+    uint8_t type;
+    uint8_t child_count;
+    TSSymbol symbol;
+    int16_t dynamic_precedence;
+    uint16_t production_id;
+  } reduce;
+  uint8_t type;
+} TSParseAction;
+
+typedef struct {
+  uint16_t lex_state;
+  uint16_t external_lex_state;
+} TSLexMode;
+
+typedef struct {
+  uint16_t lex_state;
+  uint16_t external_lex_state;
+  uint16_t reserved_word_set_id;
+} TSLexerMode;
+
+typedef union {
+  TSParseAction action;
+  struct {
+    uint8_t count;
+    bool reusable;
+  } entry;
+} TSParseActionEntry;
+
+typedef struct {
+  int32_t start;
+  int32_t end;
+} TSCharacterRange;
+
+struct TSLanguage {
+  uint32_t abi_version;
+  uint32_t symbol_count;
+  uint32_t alias_count;
+  uint32_t token_count;
+  uint32_t external_token_count;
+  uint32_t state_count;
+  uint32_t large_state_count;
+  uint32_t production_id_count;
+  uint32_t field_count;
+  uint16_t max_alias_sequence_length;
+  const uint16_t *parse_table;
+  const uint16_t *small_parse_table;
+  const uint32_t *small_parse_table_map;
+  const TSParseActionEntry *parse_actions;
+  const char * const *symbol_names;
+  const char * const *field_names;
+  const TSMapSlice *field_map_slices;
+  const TSFieldMapEntry *field_map_entries;
+  const TSSymbolMetadata *symbol_metadata;
+  const TSSymbol *public_symbol_map;
+  const uint16_t *alias_map;
+  const TSSymbol *alias_sequences;
+  const TSLexerMode *lex_modes;
+  bool (*lex_fn)(TSLexer *, TSStateId);
+  bool (*keyword_lex_fn)(TSLexer *, TSStateId);
+  TSSymbol keyword_capture_token;
+  struct {
+    const bool *states;
+    const TSSymbol *symbol_map;
+    void *(*create)(void);
+    void (*destroy)(void *);
+    bool (*scan)(void *, TSLexer *, const bool *symbol_whitelist);
+    unsigned (*serialize)(void *, char *);
+    void (*deserialize)(void *, const char *, unsigned);
+  } external_scanner;
+  const TSStateId *primary_state_ids;
+  const char *name;
+  const TSSymbol *reserved_words;
+  uint16_t max_reserved_word_set_size;
+  uint32_t supertype_count;
+  const TSSymbol *supertype_symbols;
+  const TSMapSlice *supertype_map_slices;
+  const TSSymbol *supertype_map_entries;
+  TSLanguageMetadata metadata;
+};
+
+static inline bool set_contains(const TSCharacterRange *ranges, uint32_t len, int32_t lookahead) {
+  uint32_t index = 0;
+  uint32_t size = len - index;
+  while (size > 1) {
+    uint32_t half_size = size / 2;
+    uint32_t mid_index = index + half_size;
+    const TSCharacterRange *range = &ranges[mid_index];
+    if (lookahead >= range->start && lookahead <= range->end) {
+      return true;
+    } else if (lookahead > range->end) {
+      index = mid_index;
+    }
+    size -= half_size;
+  }
+  const TSCharacterRange *range = &ranges[index];
+  return (lookahead >= range->start && lookahead <= range->end);
+}
+
+/*
+ *  Lexer Macros
+ */
+
+#ifdef _MSC_VER
+#define UNUSED __pragma(warning(suppress : 4101))
+#else
+#define UNUSED __attribute__((unused))
+#endif
+
+#define START_LEXER()           \
+  bool result = false;          \
+  bool skip = false;            \
+  UNUSED                        \
+  bool eof = false;             \
+  int32_t lookahead;            \
+  goto start;                   \
+  next_state:                   \
+  lexer->advance(lexer, skip);  \
+  start:                        \
+  skip = false;                 \
+  lookahead = lexer->lookahead;
+
+#define ADVANCE(state_value) \
+  {                          \
+    state = state_value;     \
+    goto next_state;         \
+  }
+
+#define ADVANCE_MAP(...)                                              \
+  {                                                                   \
+    static const uint16_t map[] = { __VA_ARGS__ };                    \
+    for (uint32_t i = 0; i < sizeof(map) / sizeof(map[0]); i += 2) {  \
+      if (map[i] == lookahead) {                                      \
+        state = map[i + 1];                                           \
+        goto next_state;                                              \
+      }                                                               \
+    }                                                                 \
+  }
+
+#define SKIP(state_value) \
+  {                       \
+    skip = true;          \
+    state = state_value;  \
+    goto next_state;      \
+  }
+
+#define ACCEPT_TOKEN(symbol_value)     \
+  result = true;                       \
+  lexer->result_symbol = symbol_value; \
+  lexer->mark_end(lexer);
+
+#define END_STATE() return result;
+
+/*
+ *  Parse Table Macros
+ */
+
+#define SMALL_STATE(id) ((id) - LARGE_STATE_COUNT)
+
+#define STATE(id) id
+
+#define ACTIONS(id) id
+
+#define SHIFT(state_value)            \
+  {{                                  \
+    .shift = {                        \
+      .type = TSParseActionTypeShift, \
+      .state = (state_value)          \
+    }                                 \
+  }}
+
+#define SHIFT_REPEAT(state_value)     \
+  {{                                  \
+    .shift = {                        \
+      .type = TSParseActionTypeShift, \
+      .state = (state_value),         \
+      .repetition = true              \
+    }                                 \
+  }}
+
+#define SHIFT_EXTRA()                 \
+  {{                                  \
+    .shift = {                        \
+      .type = TSParseActionTypeShift, \
+      .extra = true                   \
+    }                                 \
+  }}
+
+#define REDUCE(symbol_name, children, precedence, prod_id) \
+  {{                                                       \
+    .reduce = {                                            \
+      .type = TSParseActionTypeReduce,                     \
+      .symbol = symbol_name,                               \
+      .child_count = children,                             \
+      .dynamic_precedence = precedence,                    \
+      .production_id = prod_id                             \
+    },                                                     \
+  }}
+
+#define RECOVER()                    \
+  {{                                 \
+    .type = TSParseActionTypeRecover \
+  }}
+
+#define ACCEPT_INPUT()              \
+  {{                                \
+    .type = TSParseActionTypeAccept \
+  }}
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif  // TREE_SITTER_PARSER_H_
diff --git a/tree-sitter/dsk/test-build/grammar.js b/tree-sitter/dsk/test-build/grammar.js
new file mode 100644
index 0000000..697a76d
--- /dev/null
+++ b/tree-sitter/dsk/test-build/grammar.js
@@ -0,0 +1,48 @@
+/**
+ * Simple test grammar for build system verification
+ */
+
+module.exports = grammar({
+  name: 'test_lang',
+
+  word: $ => $.identifier,
+
+  rules: {
+    // Root rule
+    source_file: $ => repeat(choice($.statement, $.expression)),
+
+    // Basic tokens
+    identifier: $ => /[a-zA-Z_][a-zA-Z0-9_]*/,
+    number: $ => /\d+/,
+    string: $ => /"[^"]*"/,
+
+    // Expressions
+    expression: $ => choice(
+      $.identifier,
+      $.number,
+      $.string
+    ),
+
+    // Statements
+    statement: $ => choice(
+      $.variable_declaration
+    ),
+
+    // Variable declaration
+    variable_declaration: $ => seq(
+      'let',
+      $.identifier,
+      '=',
+      $.expression,
+      ';'
+    ),
+
+    // Comments
+    line_comment: $ => seq('//', /[^\n]*/)
+  },
+
+  extras: $ => [
+    /\s/,
+    $.line_comment
+  ]
+});
diff --git a/tree-sitter/dsk/test-build/src/grammar.json b/tree-sitter/dsk/test-build/src/grammar.json
new file mode 100644
index 0000000..0d709e9
--- /dev/null
+++ b/tree-sitter/dsk/test-build/src/grammar.json
@@ -0,0 +1,115 @@
+{
+  "$schema": "https://tree-sitter.github.io/tree-sitter/assets/schemas/grammar.schema.json",
+  "name": "test_lang",
+  "word": "identifier",
+  "rules": {
+    "source_file": {
+      "type": "REPEAT",
+      "content": {
+        "type": "CHOICE",
+        "members": [
+          {
+            "type": "SYMBOL",
+            "name": "statement"
+          },
+          {
+            "type": "SYMBOL",
+            "name": "expression"
+          }
+        ]
+      }
+    },
+    "identifier": {
+      "type": "PATTERN",
+      "value": "[a-zA-Z_][a-zA-Z0-9_]*"
+    },
+    "number": {
+      "type": "PATTERN",
+      "value": "\\d+"
+    },
+    "string": {
+      "type": "PATTERN",
+      "value": "\"[^\"]*\""
+    },
+    "expression": {
+      "type": "CHOICE",
+      "members": [
+        {
+          "type": "SYMBOL",
+          "name": "identifier"
+        },
+        {
+          "type": "SYMBOL",
+          "name": "number"
+        },
+        {
+          "type": "SYMBOL",
+          "name": "string"
+        }
+      ]
+    },
+    "statement": {
+      "type": "CHOICE",
+      "members": [
+        {
+          "type": "SYMBOL",
+          "name": "variable_declaration"
+        }
+      ]
+    },
+    "variable_declaration": {
+      "type": "SEQ",
+      "members": [
+        {
+          "type": "STRING",
+          "value": "let"
+        },
+        {
+          "type": "SYMBOL",
+          "name": "identifier"
+        },
+        {
+          "type": "STRING",
+          "value": "="
+        },
+        {
+          "type": "SYMBOL",
+          "name": "expression"
+        },
+        {
+          "type": "STRING",
+          "value": ";"
+        }
+      ]
+    },
+    "line_comment": {
+      "type": "SEQ",
+      "members": [
+        {
+          "type": "STRING",
+          "value": "//"
+        },
+        {
+          "type": "PATTERN",
+          "value": "[^\\n]*"
+        }
+      ]
+    }
+  },
+  "extras": [
+    {
+      "type": "PATTERN",
+      "value": "\\s"
+    },
+    {
+      "type": "SYMBOL",
+      "name": "line_comment"
+    }
+  ],
+  "conflicts": [],
+  "precedences": [],
+  "externals": [],
+  "inline": [],
+  "supertypes": [],
+  "reserved": {}
+}
\ No newline at end of file
diff --git a/tree-sitter/dsk/test-build/src/node-types.json b/tree-sitter/dsk/test-build/src/node-types.json
new file mode 100644
index 0000000..a21a153
--- /dev/null
+++ b/tree-sitter/dsk/test-build/src/node-types.json
@@ -0,0 +1,112 @@
+[
+  {
+    "type": "expression",
+    "named": true,
+    "fields": {},
+    "children": {
+      "multiple": false,
+      "required": true,
+      "types": [
+        {
+          "type": "identifier",
+          "named": true
+        },
+        {
+          "type": "number",
+          "named": true
+        },
+        {
+          "type": "string",
+          "named": true
+        }
+      ]
+    }
+  },
+  {
+    "type": "line_comment",
+    "named": true,
+    "fields": {}
+  },
+  {
+    "type": "source_file",
+    "named": true,
+    "root": true,
+    "fields": {},
+    "children": {
+      "multiple": true,
+      "required": false,
+      "types": [
+        {
+          "type": "expression",
+          "named": true
+        },
+        {
+          "type": "statement",
+          "named": true
+        }
+      ]
+    }
+  },
+  {
+    "type": "statement",
+    "named": true,
+    "fields": {},
+    "children": {
+      "multiple": false,
+      "required": true,
+      "types": [
+        {
+          "type": "variable_declaration",
+          "named": true
+        }
+      ]
+    }
+  },
+  {
+    "type": "variable_declaration",
+    "named": true,
+    "fields": {},
+    "children": {
+      "multiple": true,
+      "required": true,
+      "types": [
+        {
+          "type": "expression",
+          "named": true
+        },
+        {
+          "type": "identifier",
+          "named": true
+        }
+      ]
+    }
+  },
+  {
+    "type": "//",
+    "named": false
+  },
+  {
+    "type": ";",
+    "named": false
+  },
+  {
+    "type": "=",
+    "named": false
+  },
+  {
+    "type": "identifier",
+    "named": true
+  },
+  {
+    "type": "let",
+    "named": false
+  },
+  {
+    "type": "number",
+    "named": true
+  },
+  {
+    "type": "string",
+    "named": true
+  }
+]
\ No newline at end of file
diff --git a/tree-sitter/dsk/test-build/src/parser.c b/tree-sitter/dsk/test-build/src/parser.c
new file mode 100644
index 0000000..2191da5
--- /dev/null
+++ b/tree-sitter/dsk/test-build/src/parser.c
@@ -0,0 +1,522 @@
+/* Automatically @generated by tree-sitter v0.25.6 */
+
+#include "tree_sitter/parser.h"
+
+#if defined(__GNUC__) || defined(__clang__)
+#pragma GCC diagnostic ignored "-Wmissing-field-initializers"
+#endif
+
+#define LANGUAGE_VERSION 14
+#define STATE_COUNT 15
+#define LARGE_STATE_COUNT 5
+#define SYMBOL_COUNT 15
+#define ALIAS_COUNT 0
+#define TOKEN_COUNT 9
+#define EXTERNAL_TOKEN_COUNT 0
+#define FIELD_COUNT 0
+#define MAX_ALIAS_SEQUENCE_LENGTH 5
+#define MAX_RESERVED_WORD_SET_SIZE 0
+#define PRODUCTION_ID_COUNT 1
+#define SUPERTYPE_COUNT 0
+
+enum ts_symbol_identifiers {
+  sym_identifier = 1,
+  sym_number = 2,
+  sym_string = 3,
+  anon_sym_let = 4,
+  anon_sym_EQ = 5,
+  anon_sym_SEMI = 6,
+  anon_sym_SLASH_SLASH = 7,
+  aux_sym_line_comment_token1 = 8,
+  sym_source_file = 9,
+  sym_expression = 10,
+  sym_statement = 11,
+  sym_variable_declaration = 12,
+  sym_line_comment = 13,
+  aux_sym_source_file_repeat1 = 14,
+};
+
+static const char * const ts_symbol_names[] = {
+  [ts_builtin_sym_end] = "end",
+  [sym_identifier] = "identifier",
+  [sym_number] = "number",
+  [sym_string] = "string",
+  [anon_sym_let] = "let",
+  [anon_sym_EQ] = "=",
+  [anon_sym_SEMI] = ";",
+  [anon_sym_SLASH_SLASH] = "//",
+  [aux_sym_line_comment_token1] = "line_comment_token1",
+  [sym_source_file] = "source_file",
+  [sym_expression] = "expression",
+  [sym_statement] = "statement",
+  [sym_variable_declaration] = "variable_declaration",
+  [sym_line_comment] = "line_comment",
+  [aux_sym_source_file_repeat1] = "source_file_repeat1",
+};
+
+static const TSSymbol ts_symbol_map[] = {
+  [ts_builtin_sym_end] = ts_builtin_sym_end,
+  [sym_identifier] = sym_identifier,
+  [sym_number] = sym_number,
+  [sym_string] = sym_string,
+  [anon_sym_let] = anon_sym_let,
+  [anon_sym_EQ] = anon_sym_EQ,
+  [anon_sym_SEMI] = anon_sym_SEMI,
+  [anon_sym_SLASH_SLASH] = anon_sym_SLASH_SLASH,
+  [aux_sym_line_comment_token1] = aux_sym_line_comment_token1,
+  [sym_source_file] = sym_source_file,
+  [sym_expression] = sym_expression,
+  [sym_statement] = sym_statement,
+  [sym_variable_declaration] = sym_variable_declaration,
+  [sym_line_comment] = sym_line_comment,
+  [aux_sym_source_file_repeat1] = aux_sym_source_file_repeat1,
+};
+
+static const TSSymbolMetadata ts_symbol_metadata[] = {
+  [ts_builtin_sym_end] = {
+    .visible = false,
+    .named = true,
+  },
+  [sym_identifier] = {
+    .visible = true,
+    .named = true,
+  },
+  [sym_number] = {
+    .visible = true,
+    .named = true,
+  },
+  [sym_string] = {
+    .visible = true,
+    .named = true,
+  },
+  [anon_sym_let] = {
+    .visible = true,
+    .named = false,
+  },
+  [anon_sym_EQ] = {
+    .visible = true,
+    .named = false,
+  },
+  [anon_sym_SEMI] = {
+    .visible = true,
+    .named = false,
+  },
+  [anon_sym_SLASH_SLASH] = {
+    .visible = true,
+    .named = false,
+  },
+  [aux_sym_line_comment_token1] = {
+    .visible = false,
+    .named = false,
+  },
+  [sym_source_file] = {
+    .visible = true,
+    .named = true,
+  },
+  [sym_expression] = {
+    .visible = true,
+    .named = true,
+  },
+  [sym_statement] = {
+    .visible = true,
+    .named = true,
+  },
+  [sym_variable_declaration] = {
+    .visible = true,
+    .named = true,
+  },
+  [sym_line_comment] = {
+    .visible = true,
+    .named = true,
+  },
+  [aux_sym_source_file_repeat1] = {
+    .visible = false,
+    .named = false,
+  },
+};
+
+static const TSSymbol ts_alias_sequences[PRODUCTION_ID_COUNT][MAX_ALIAS_SEQUENCE_LENGTH] = {
+  [0] = {0},
+};
+
+static const uint16_t ts_non_terminal_alias_map[] = {
+  0,
+};
+
+static const TSStateId ts_primary_state_ids[STATE_COUNT] = {
+  [0] = 0,
+  [1] = 1,
+  [2] = 2,
+  [3] = 3,
+  [4] = 4,
+  [5] = 5,
+  [6] = 6,
+  [7] = 7,
+  [8] = 8,
+  [9] = 9,
+  [10] = 10,
+  [11] = 11,
+  [12] = 12,
+  [13] = 13,
+  [14] = 14,
+};
+
+static bool ts_lex(TSLexer *lexer, TSStateId state) {
+  START_LEXER();
+  eof = lexer->eof(lexer);
+  switch (state) {
+    case 0:
+      if (eof) ADVANCE(3);
+      if (lookahead == '"') ADVANCE(1);
+      if (lookahead == '/') ADVANCE(2);
+      if (lookahead == ';') ADVANCE(8);
+      if (lookahead == '=') ADVANCE(7);
+      if (('\t' <= lookahead && lookahead <= '\r') ||
+          lookahead == ' ') SKIP(0);
+      if (('0' <= lookahead && lookahead <= '9')) ADVANCE(5);
+      if (('A' <= lookahead && lookahead <= 'Z') ||
+          lookahead == '_' ||
+          ('a' <= lookahead && lookahead <= 'z')) ADVANCE(4);
+      END_STATE();
+    case 1:
+      if (lookahead == '"') ADVANCE(6);
+      if (lookahead != 0) ADVANCE(1);
+      END_STATE();
+    case 2:
+      if (lookahead == '/') ADVANCE(9);
+      END_STATE();
+    case 3:
+      ACCEPT_TOKEN(ts_builtin_sym_end);
+      END_STATE();
+    case 4:
+      ACCEPT_TOKEN(sym_identifier);
+      if (('0' <= lookahead && lookahead <= '9') ||
+          ('A' <= lookahead && lookahead <= 'Z') ||
+          lookahead == '_' ||
+          ('a' <= lookahead && lookahead <= 'z')) ADVANCE(4);
+      END_STATE();
+    case 5:
+      ACCEPT_TOKEN(sym_number);
+      if (('0' <= lookahead && lookahead <= '9')) ADVANCE(5);
+      END_STATE();
+    case 6:
+      ACCEPT_TOKEN(sym_string);
+      END_STATE();
+    case 7:
+      ACCEPT_TOKEN(anon_sym_EQ);
+      END_STATE();
+    case 8:
+      ACCEPT_TOKEN(anon_sym_SEMI);
+      END_STATE();
+    case 9:
+      ACCEPT_TOKEN(anon_sym_SLASH_SLASH);
+      END_STATE();
+    case 10:
+      ACCEPT_TOKEN(anon_sym_SLASH_SLASH);
+      if (lookahead != 0 &&
+          lookahead != '\n') ADVANCE(13);
+      END_STATE();
+    case 11:
+      ACCEPT_TOKEN(aux_sym_line_comment_token1);
+      if (lookahead == '/') ADVANCE(12);
+      if (lookahead == '\t' ||
+          (0x0b <= lookahead && lookahead <= '\r') ||
+          lookahead == ' ') ADVANCE(11);
+      if (lookahead != 0 &&
+          (lookahead < '\t' || '\r' < lookahead)) ADVANCE(13);
+      END_STATE();
+    case 12:
+      ACCEPT_TOKEN(aux_sym_line_comment_token1);
+      if (lookahead == '/') ADVANCE(10);
+      if (lookahead != 0 &&
+          lookahead != '\n') ADVANCE(13);
+      END_STATE();
+    case 13:
+      ACCEPT_TOKEN(aux_sym_line_comment_token1);
+      if (lookahead != 0 &&
+          lookahead != '\n') ADVANCE(13);
+      END_STATE();
+    default:
+      return false;
+  }
+}
+
+static bool ts_lex_keywords(TSLexer *lexer, TSStateId state) {
+  START_LEXER();
+  eof = lexer->eof(lexer);
+  switch (state) {
+    case 0:
+      if (lookahead == 'l') ADVANCE(1);
+      if (('\t' <= lookahead && lookahead <= '\r') ||
+          lookahead == ' ') SKIP(0);
+      END_STATE();
+    case 1:
+      if (lookahead == 'e') ADVANCE(2);
+      END_STATE();
+    case 2:
+      if (lookahead == 't') ADVANCE(3);
+      END_STATE();
+    case 3:
+      ACCEPT_TOKEN(anon_sym_let);
+      END_STATE();
+    default:
+      return false;
+  }
+}
+
+static const TSLexMode ts_lex_modes[STATE_COUNT] = {
+  [0] = {.lex_state = 0},
+  [1] = {.lex_state = 0},
+  [2] = {.lex_state = 0},
+  [3] = {.lex_state = 0},
+  [4] = {.lex_state = 0},
+  [5] = {.lex_state = 0},
+  [6] = {.lex_state = 0},
+  [7] = {.lex_state = 0},
+  [8] = {.lex_state = 0},
+  [9] = {.lex_state = 11},
+  [10] = {.lex_state = 0},
+  [11] = {.lex_state = 0},
+  [12] = {.lex_state = 0},
+  [13] = {.lex_state = 0},
+  [14] = {(TSStateId)(-1),},
+};
+
+static const uint16_t ts_parse_table[LARGE_STATE_COUNT][SYMBOL_COUNT] = {
+  [STATE(0)] = {
+    [sym_line_comment] = STATE(0),
+    [ts_builtin_sym_end] = ACTIONS(1),
+    [sym_identifier] = ACTIONS(1),
+    [sym_number] = ACTIONS(1),
+    [sym_string] = ACTIONS(1),
+    [anon_sym_let] = ACTIONS(1),
+    [anon_sym_EQ] = ACTIONS(1),
+    [anon_sym_SEMI] = ACTIONS(1),
+    [anon_sym_SLASH_SLASH] = ACTIONS(3),
+  },
+  [STATE(1)] = {
+    [sym_source_file] = STATE(11),
+    [sym_expression] = STATE(5),
+    [sym_statement] = STATE(5),
+    [sym_variable_declaration] = STATE(6),
+    [sym_line_comment] = STATE(1),
+    [aux_sym_source_file_repeat1] = STATE(2),
+    [ts_builtin_sym_end] = ACTIONS(5),
+    [sym_identifier] = ACTIONS(7),
+    [sym_number] = ACTIONS(9),
+    [sym_string] = ACTIONS(9),
+    [anon_sym_let] = ACTIONS(11),
+    [anon_sym_SLASH_SLASH] = ACTIONS(3),
+  },
+  [STATE(2)] = {
+    [sym_expression] = STATE(5),
+    [sym_statement] = STATE(5),
+    [sym_variable_declaration] = STATE(6),
+    [sym_line_comment] = STATE(2),
+    [aux_sym_source_file_repeat1] = STATE(3),
+    [ts_builtin_sym_end] = ACTIONS(13),
+    [sym_identifier] = ACTIONS(7),
+    [sym_number] = ACTIONS(9),
+    [sym_string] = ACTIONS(9),
+    [anon_sym_let] = ACTIONS(11),
+    [anon_sym_SLASH_SLASH] = ACTIONS(3),
+  },
+  [STATE(3)] = {
+    [sym_expression] = STATE(5),
+    [sym_statement] = STATE(5),
+    [sym_variable_declaration] = STATE(6),
+    [sym_line_comment] = STATE(3),
+    [aux_sym_source_file_repeat1] = STATE(3),
+    [ts_builtin_sym_end] = ACTIONS(15),
+    [sym_identifier] = ACTIONS(17),
+    [sym_number] = ACTIONS(20),
+    [sym_string] = ACTIONS(20),
+    [anon_sym_let] = ACTIONS(23),
+    [anon_sym_SLASH_SLASH] = ACTIONS(3),
+  },
+  [STATE(4)] = {
+    [sym_line_comment] = STATE(4),
+    [ts_builtin_sym_end] = ACTIONS(26),
+    [sym_identifier] = ACTIONS(28),
+    [sym_number] = ACTIONS(26),
+    [sym_string] = ACTIONS(26),
+    [anon_sym_let] = ACTIONS(28),
+    [anon_sym_SEMI] = ACTIONS(26),
+    [anon_sym_SLASH_SLASH] = ACTIONS(3),
+  },
+};
+
+static const uint16_t ts_small_parse_table[] = {
+  [0] = 4,
+    ACTIONS(3), 1,
+      anon_sym_SLASH_SLASH,
+    STATE(5), 1,
+      sym_line_comment,
+    ACTIONS(32), 2,
+      sym_identifier,
+      anon_sym_let,
+    ACTIONS(30), 3,
+      ts_builtin_sym_end,
+      sym_number,
+      sym_string,
+  [16] = 4,
+    ACTIONS(3), 1,
+      anon_sym_SLASH_SLASH,
+    STATE(6), 1,
+      sym_line_comment,
+    ACTIONS(36), 2,
+      sym_identifier,
+      anon_sym_let,
+    ACTIONS(34), 3,
+      ts_builtin_sym_end,
+      sym_number,
+      sym_string,
+  [32] = 4,
+    ACTIONS(3), 1,
+      anon_sym_SLASH_SLASH,
+    STATE(7), 1,
+      sym_line_comment,
+    ACTIONS(40), 2,
+      sym_identifier,
+      anon_sym_let,
+    ACTIONS(38), 3,
+      ts_builtin_sym_end,
+      sym_number,
+      sym_string,
+  [48] = 4,
+    ACTIONS(3), 1,
+      anon_sym_SLASH_SLASH,
+    STATE(8), 1,
+      sym_line_comment,
+    STATE(13), 1,
+      sym_expression,
+    ACTIONS(9), 3,
+      sym_identifier,
+      sym_number,
+      sym_string,
+  [63] = 3,
+    ACTIONS(42), 1,
+      anon_sym_SLASH_SLASH,
+    ACTIONS(44), 1,
+      aux_sym_line_comment_token1,
+    STATE(9), 1,
+      sym_line_comment,
+  [73] = 3,
+    ACTIONS(3), 1,
+      anon_sym_SLASH_SLASH,
+    ACTIONS(46), 1,
+      sym_identifier,
+    STATE(10), 1,
+      sym_line_comment,
+  [83] = 3,
+    ACTIONS(3), 1,
+      anon_sym_SLASH_SLASH,
+    ACTIONS(48), 1,
+      ts_builtin_sym_end,
+    STATE(11), 1,
+      sym_line_comment,
+  [93] = 3,
+    ACTIONS(3), 1,
+      anon_sym_SLASH_SLASH,
+    ACTIONS(50), 1,
+      anon_sym_EQ,
+    STATE(12), 1,
+      sym_line_comment,
+  [103] = 3,
+    ACTIONS(3), 1,
+      anon_sym_SLASH_SLASH,
+    ACTIONS(52), 1,
+      anon_sym_SEMI,
+    STATE(13), 1,
+      sym_line_comment,
+  [113] = 1,
+    ACTIONS(54), 1,
+      ts_builtin_sym_end,
+};
+
+static const uint32_t ts_small_parse_table_map[] = {
+  [SMALL_STATE(5)] = 0,
+  [SMALL_STATE(6)] = 16,
+  [SMALL_STATE(7)] = 32,
+  [SMALL_STATE(8)] = 48,
+  [SMALL_STATE(9)] = 63,
+  [SMALL_STATE(10)] = 73,
+  [SMALL_STATE(11)] = 83,
+  [SMALL_STATE(12)] = 93,
+  [SMALL_STATE(13)] = 103,
+  [SMALL_STATE(14)] = 113,
+};
+
+static const TSParseActionEntry ts_parse_actions[] = {
+  [0] = {.entry = {.count = 0, .reusable = false}},
+  [1] = {.entry = {.count = 1, .reusable = false}}, RECOVER(),
+  [3] = {.entry = {.count = 1, .reusable = true}}, SHIFT(9),
+  [5] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_source_file, 0, 0, 0),
+  [7] = {.entry = {.count = 1, .reusable = false}}, SHIFT(4),
+  [9] = {.entry = {.count = 1, .reusable = true}}, SHIFT(4),
+  [11] = {.entry = {.count = 1, .reusable = false}}, SHIFT(10),
+  [13] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_source_file, 1, 0, 0),
+  [15] = {.entry = {.count = 1, .reusable = true}}, REDUCE(aux_sym_source_file_repeat1, 2, 0, 0),
+  [17] = {.entry = {.count = 2, .reusable = false}}, REDUCE(aux_sym_source_file_repeat1, 2, 0, 0), SHIFT_REPEAT(4),
+  [20] = {.entry = {.count = 2, .reusable = true}}, REDUCE(aux_sym_source_file_repeat1, 2, 0, 0), SHIFT_REPEAT(4),
+  [23] = {.entry = {.count = 2, .reusable = false}}, REDUCE(aux_sym_source_file_repeat1, 2, 0, 0), SHIFT_REPEAT(10),
+  [26] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_expression, 1, 0, 0),
+  [28] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_expression, 1, 0, 0),
+  [30] = {.entry = {.count = 1, .reusable = true}}, REDUCE(aux_sym_source_file_repeat1, 1, 0, 0),
+  [32] = {.entry = {.count = 1, .reusable = false}}, REDUCE(aux_sym_source_file_repeat1, 1, 0, 0),
+  [34] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_statement, 1, 0, 0),
+  [36] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_statement, 1, 0, 0),
+  [38] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_variable_declaration, 5, 0, 0),
+  [40] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_variable_declaration, 5, 0, 0),
+  [42] = {.entry = {.count = 1, .reusable = false}}, SHIFT(9),
+  [44] = {.entry = {.count = 1, .reusable = false}}, SHIFT(14),
+  [46] = {.entry = {.count = 1, .reusable = true}}, SHIFT(12),
+  [48] = {.entry = {.count = 1, .reusable = true}},  ACCEPT_INPUT(),
+  [50] = {.entry = {.count = 1, .reusable = true}}, SHIFT(8),
+  [52] = {.entry = {.count = 1, .reusable = true}}, SHIFT(7),
+  [54] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_line_comment, 2, 0, 0),
+};
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+#ifdef TREE_SITTER_HIDE_SYMBOLS
+#define TS_PUBLIC
+#elif defined(_WIN32)
+#define TS_PUBLIC __declspec(dllexport)
+#else
+#define TS_PUBLIC __attribute__((visibility("default")))
+#endif
+
+TS_PUBLIC const TSLanguage *tree_sitter_test_lang(void) {
+  static const TSLanguage language = {
+    .abi_version = LANGUAGE_VERSION,
+    .symbol_count = SYMBOL_COUNT,
+    .alias_count = ALIAS_COUNT,
+    .token_count = TOKEN_COUNT,
+    .external_token_count = EXTERNAL_TOKEN_COUNT,
+    .state_count = STATE_COUNT,
+    .large_state_count = LARGE_STATE_COUNT,
+    .production_id_count = PRODUCTION_ID_COUNT,
+    .field_count = FIELD_COUNT,
+    .max_alias_sequence_length = MAX_ALIAS_SEQUENCE_LENGTH,
+    .parse_table = &ts_parse_table[0][0],
+    .small_parse_table = ts_small_parse_table,
+    .small_parse_table_map = ts_small_parse_table_map,
+    .parse_actions = ts_parse_actions,
+    .symbol_names = ts_symbol_names,
+    .symbol_metadata = ts_symbol_metadata,
+    .public_symbol_map = ts_symbol_map,
+    .alias_map = ts_non_terminal_alias_map,
+    .alias_sequences = &ts_alias_sequences[0][0],
+    .lex_modes = (const void*)ts_lex_modes,
+    .lex_fn = ts_lex,
+    .keyword_lex_fn = ts_lex_keywords,
+    .keyword_capture_token = sym_identifier,
+    .primary_state_ids = ts_primary_state_ids,
+  };
+  return &language;
+}
+#ifdef __cplusplus
+}
+#endif
diff --git a/tree-sitter/dsk/test-build/src/tree_sitter/alloc.h b/tree-sitter/dsk/test-build/src/tree_sitter/alloc.h
new file mode 100644
index 0000000..1abdd12
--- /dev/null
+++ b/tree-sitter/dsk/test-build/src/tree_sitter/alloc.h
@@ -0,0 +1,54 @@
+#ifndef TREE_SITTER_ALLOC_H_
+#define TREE_SITTER_ALLOC_H_
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include <stdbool.h>
+#include <stdio.h>
+#include <stdlib.h>
+
+// Allow clients to override allocation functions
+#ifdef TREE_SITTER_REUSE_ALLOCATOR
+
+extern void *(*ts_current_malloc)(size_t size);
+extern void *(*ts_current_calloc)(size_t count, size_t size);
+extern void *(*ts_current_realloc)(void *ptr, size_t size);
+extern void (*ts_current_free)(void *ptr);
+
+#ifndef ts_malloc
+#define ts_malloc  ts_current_malloc
+#endif
+#ifndef ts_calloc
+#define ts_calloc  ts_current_calloc
+#endif
+#ifndef ts_realloc
+#define ts_realloc ts_current_realloc
+#endif
+#ifndef ts_free
+#define ts_free    ts_current_free
+#endif
+
+#else
+
+#ifndef ts_malloc
+#define ts_malloc  malloc
+#endif
+#ifndef ts_calloc
+#define ts_calloc  calloc
+#endif
+#ifndef ts_realloc
+#define ts_realloc realloc
+#endif
+#ifndef ts_free
+#define ts_free    free
+#endif
+
+#endif
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif // TREE_SITTER_ALLOC_H_
diff --git a/tree-sitter/dsk/test-build/src/tree_sitter/array.h b/tree-sitter/dsk/test-build/src/tree_sitter/array.h
new file mode 100644
index 0000000..a17a574
--- /dev/null
+++ b/tree-sitter/dsk/test-build/src/tree_sitter/array.h
@@ -0,0 +1,291 @@
+#ifndef TREE_SITTER_ARRAY_H_
+#define TREE_SITTER_ARRAY_H_
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include "./alloc.h"
+
+#include <assert.h>
+#include <stdbool.h>
+#include <stdint.h>
+#include <stdlib.h>
+#include <string.h>
+
+#ifdef _MSC_VER
+#pragma warning(push)
+#pragma warning(disable : 4101)
+#elif defined(__GNUC__) || defined(__clang__)
+#pragma GCC diagnostic push
+#pragma GCC diagnostic ignored "-Wunused-variable"
+#endif
+
+#define Array(T)       \
+  struct {             \
+    T *contents;       \
+    uint32_t size;     \
+    uint32_t capacity; \
+  }
+
+/// Initialize an array.
+#define array_init(self) \
+  ((self)->size = 0, (self)->capacity = 0, (self)->contents = NULL)
+
+/// Create an empty array.
+#define array_new() \
+  { NULL, 0, 0 }
+
+/// Get a pointer to the element at a given `index` in the array.
+#define array_get(self, _index) \
+  (assert((uint32_t)(_index) < (self)->size), &(self)->contents[_index])
+
+/// Get a pointer to the first element in the array.
+#define array_front(self) array_get(self, 0)
+
+/// Get a pointer to the last element in the array.
+#define array_back(self) array_get(self, (self)->size - 1)
+
+/// Clear the array, setting its size to zero. Note that this does not free any
+/// memory allocated for the array's contents.
+#define array_clear(self) ((self)->size = 0)
+
+/// Reserve `new_capacity` elements of space in the array. If `new_capacity` is
+/// less than the array's current capacity, this function has no effect.
+#define array_reserve(self, new_capacity) \
+  _array__reserve((Array *)(self), array_elem_size(self), new_capacity)
+
+/// Free any memory allocated for this array. Note that this does not free any
+/// memory allocated for the array's contents.
+#define array_delete(self) _array__delete((Array *)(self))
+
+/// Push a new `element` onto the end of the array.
+#define array_push(self, element)                            \
+  (_array__grow((Array *)(self), 1, array_elem_size(self)), \
+   (self)->contents[(self)->size++] = (element))
+
+/// Increase the array's size by `count` elements.
+/// New elements are zero-initialized.
+#define array_grow_by(self, count) \
+  do { \
+    if ((count) == 0) break; \
+    _array__grow((Array *)(self), count, array_elem_size(self)); \
+    memset((self)->contents + (self)->size, 0, (count) * array_elem_size(self)); \
+    (self)->size += (count); \
+  } while (0)
+
+/// Append all elements from one array to the end of another.
+#define array_push_all(self, other)                                       \
+  array_extend((self), (other)->size, (other)->contents)
+
+/// Append `count` elements to the end of the array, reading their values from the
+/// `contents` pointer.
+#define array_extend(self, count, contents)                    \
+  _array__splice(                                               \
+    (Array *)(self), array_elem_size(self), (self)->size, \
+    0, count,  contents                                        \
+  )
+
+/// Remove `old_count` elements from the array starting at the given `index`. At
+/// the same index, insert `new_count` new elements, reading their values from the
+/// `new_contents` pointer.
+#define array_splice(self, _index, old_count, new_count, new_contents)  \
+  _array__splice(                                                       \
+    (Array *)(self), array_elem_size(self), _index,                \
+    old_count, new_count, new_contents                                 \
+  )
+
+/// Insert one `element` into the array at the given `index`.
+#define array_insert(self, _index, element) \
+  _array__splice((Array *)(self), array_elem_size(self), _index, 0, 1, &(element))
+
+/// Remove one element from the array at the given `index`.
+#define array_erase(self, _index) \
+  _array__erase((Array *)(self), array_elem_size(self), _index)
+
+/// Pop the last element off the array, returning the element by value.
+#define array_pop(self) ((self)->contents[--(self)->size])
+
+/// Assign the contents of one array to another, reallocating if necessary.
+#define array_assign(self, other) \
+  _array__assign((Array *)(self), (const Array *)(other), array_elem_size(self))
+
+/// Swap one array with another
+#define array_swap(self, other) \
+  _array__swap((Array *)(self), (Array *)(other))
+
+/// Get the size of the array contents
+#define array_elem_size(self) (sizeof *(self)->contents)
+
+/// Search a sorted array for a given `needle` value, using the given `compare`
+/// callback to determine the order.
+///
+/// If an existing element is found to be equal to `needle`, then the `index`
+/// out-parameter is set to the existing value's index, and the `exists`
+/// out-parameter is set to true. Otherwise, `index` is set to an index where
+/// `needle` should be inserted in order to preserve the sorting, and `exists`
+/// is set to false.
+#define array_search_sorted_with(self, compare, needle, _index, _exists) \
+  _array__search_sorted(self, 0, compare, , needle, _index, _exists)  // empty token for `suffix`: compare whole elements
+
+/// Search a sorted array for a given `needle` value, using integer comparisons
+/// of a given struct field (specified with a leading dot) to determine the order.
+///
+/// See also `array_search_sorted_with`.
+#define array_search_sorted_by(self, field, needle, _index, _exists) \
+  _array__search_sorted(self, 0, _compare_int, field, needle, _index, _exists)
+
+/// Insert a given `value` into a sorted array, using the given `compare`
+/// callback to determine the order.
+#define array_insert_sorted_with(self, compare, value) \
+  do { \
+    unsigned _index, _exists; \
+    array_search_sorted_with(self, compare, &(value), &_index, &_exists); \
+    if (!_exists) array_insert(self, _index, value); \
+  } while (0)
+
+/// Insert a given `value` into a sorted array, using integer comparisons of
+/// a given struct field (specified with a leading dot) to determine the order.
+///
+/// See also `array_search_sorted_by`.
+#define array_insert_sorted_by(self, field, value) \
+  do { \
+    unsigned _index, _exists; \
+    array_search_sorted_by(self, field, (value) field, &_index, &_exists); \
+    if (!_exists) array_insert(self, _index, value); \
+  } while (0)
+
+// Private
+
+typedef Array(void) Array;  // generic header layout shared by every Array(T); the macros above cast to this
+
+/// This is not what you're looking for, see `array_delete`.
+static inline void _array__delete(Array *self) {
+  if (self->contents) {
+    ts_free(self->contents);
+    self->contents = NULL;
+    self->size = 0;
+    self->capacity = 0;
+  }
+}
+
+/// This is not what you're looking for, see `array_erase`.
+static inline void _array__erase(Array *self, size_t element_size,
+                                uint32_t index) {
+  assert(index < self->size);
+  char *contents = (char *)self->contents;
+  memmove(contents + index * element_size, contents + (index + 1) * element_size,
+          (self->size - index - 1) * element_size);
+  self->size--;
+}
+
+/// This is not what you're looking for, see `array_reserve`.
+static inline void _array__reserve(Array *self, size_t element_size, uint32_t new_capacity) {
+  if (new_capacity > self->capacity) {
+    if (self->contents) {
+      self->contents = ts_realloc(self->contents, new_capacity * element_size);
+    } else {
+      self->contents = ts_malloc(new_capacity * element_size);
+    }
+    self->capacity = new_capacity;
+  }
+}
+
+/// This is not what you're looking for, see `array_assign`.
+static inline void _array__assign(Array *self, const Array *other, size_t element_size) {
+  _array__reserve(self, element_size, other->size);
+  self->size = other->size;
+  memcpy(self->contents, other->contents, self->size * element_size);
+}
+
+/// This is not what you're looking for, see `array_swap`.
+static inline void _array__swap(Array *self, Array *other) {
+  Array swap = *other;
+  *other = *self;
+  *self = swap;
+}
+
+/// This is not what you're looking for, see `array_push` or `array_grow_by`.
+static inline void _array__grow(Array *self, uint32_t count, size_t element_size) {
+  uint32_t new_size = self->size + count;
+  if (new_size > self->capacity) {
+    uint32_t new_capacity = self->capacity * 2;  // geometric growth amortizes reallocation cost
+    if (new_capacity < 8) new_capacity = 8;  // minimum initial capacity
+    if (new_capacity < new_size) new_capacity = new_size;
+    _array__reserve(self, element_size, new_capacity);
+  }
+}
+
+/// This is not what you're looking for, see `array_splice`.
+static inline void _array__splice(Array *self, size_t element_size,
+                                 uint32_t index, uint32_t old_count,
+                                 uint32_t new_count, const void *elements) {
+  uint32_t new_size = self->size + new_count - old_count;
+  uint32_t old_end = index + old_count;
+  uint32_t new_end = index + new_count;
+  assert(old_end <= self->size);  // can't remove past the end of the array
+
+  _array__reserve(self, element_size, new_size);
+
+  char *contents = (char *)self->contents;
+  if (self->size > old_end) {
+    memmove(
+      contents + new_end * element_size,
+      contents + old_end * element_size,
+      (self->size - old_end) * element_size
+    );
+  }
+  if (new_count > 0) {
+    if (elements) {  // NULL `elements` means zero-fill the inserted gap instead
+      memcpy(
+        (contents + index * element_size),
+        elements,
+        new_count * element_size
+      );
+    } else {
+      memset(
+        (contents + index * element_size),
+        0,
+        new_count * element_size
+      );
+    }
+  }
+  self->size += new_count - old_count;
+}
+
+/// A binary search routine, based on Rust's `std::slice::binary_search_by`.
+/// This is not what you're looking for, see `array_search_sorted_with` or `array_search_sorted_by`.
+#define _array__search_sorted(self, start, compare, suffix, needle, _index, _exists) \
+  do { \
+    *(_index) = start; \
+    *(_exists) = false; \
+    uint32_t size = (self)->size - *(_index); \
+    if (size == 0) break; \
+    int comparison; \
+    while (size > 1) { \
+      uint32_t half_size = size / 2; \
+      uint32_t mid_index = *(_index) + half_size; \
+      comparison = compare(&((self)->contents[mid_index] suffix), (needle)); \
+      if (comparison <= 0) *(_index) = mid_index; \
+      size -= half_size; \
+    } \
+    comparison = compare(&((self)->contents[*(_index)] suffix), (needle)); \
+    if (comparison == 0) *(_exists) = true; \
+    else if (comparison < 0) *(_index) += 1; \
+  } while (0)
+
+/// Helper macro for the `_sorted_by` routines below. This takes the left (existing)
+/// parameter by reference in order to work with the generic sorting function above.
+#define _compare_int(a, b) ((int)*(a) - (int)(b))
+
+#ifdef _MSC_VER
+#pragma warning(pop)
+#elif defined(__GNUC__) || defined(__clang__)
+#pragma GCC diagnostic pop
+#endif
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif  // TREE_SITTER_ARRAY_H_
diff --git a/tree-sitter/dsk/test-build/src/tree_sitter/parser.h b/tree-sitter/dsk/test-build/src/tree_sitter/parser.h
new file mode 100644
index 0000000..858107d
--- /dev/null
+++ b/tree-sitter/dsk/test-build/src/tree_sitter/parser.h
@@ -0,0 +1,286 @@
+#ifndef TREE_SITTER_PARSER_H_
+#define TREE_SITTER_PARSER_H_
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include <stdbool.h>
+#include <stdint.h>
+#include <stdlib.h>
+
+#define ts_builtin_sym_error ((TSSymbol)-1)
+#define ts_builtin_sym_end 0
+#define TREE_SITTER_SERIALIZATION_BUFFER_SIZE 1024  // NOTE(review): presumably the buffer size for external_scanner.serialize below -- confirm
+
+#ifndef TREE_SITTER_API_H_  // minimal forward declarations when the public api.h was not included first
+typedef uint16_t TSStateId;
+typedef uint16_t TSSymbol;
+typedef uint16_t TSFieldId;
+typedef struct TSLanguage TSLanguage;
+typedef struct TSLanguageMetadata {
+  uint8_t major_version;
+  uint8_t minor_version;
+  uint8_t patch_version;
+} TSLanguageMetadata;
+#endif
+
+typedef struct {
+  TSFieldId field_id;
+  uint8_t child_index;
+  bool inherited;
+} TSFieldMapEntry;
+
+// Used to index the field and supertype maps.
+typedef struct {
+  uint16_t index;
+  uint16_t length;
+} TSMapSlice;
+
+typedef struct {
+  bool visible;
+  bool named;
+  bool supertype;
+} TSSymbolMetadata;
+
+typedef struct TSLexer TSLexer;
+
+struct TSLexer {
+  int32_t lookahead;
+  TSSymbol result_symbol;
+  void (*advance)(TSLexer *, bool);
+  void (*mark_end)(TSLexer *);
+  uint32_t (*get_column)(TSLexer *);
+  bool (*is_at_included_range_start)(const TSLexer *);
+  bool (*eof)(const TSLexer *);
+  void (*log)(const TSLexer *, const char *, ...);
+};
+
+typedef enum {
+  TSParseActionTypeShift,
+  TSParseActionTypeReduce,
+  TSParseActionTypeAccept,
+  TSParseActionTypeRecover,
+} TSParseActionType;
+
+typedef union {
+  struct {
+    uint8_t type;
+    TSStateId state;
+    bool extra;
+    bool repetition;
+  } shift;
+  struct {
+    uint8_t type;
+    uint8_t child_count;
+    TSSymbol symbol;
+    int16_t dynamic_precedence;
+    uint16_t production_id;
+  } reduce;
+  uint8_t type;  // overlays the leading `type` member of both struct variants above
+} TSParseAction;
+
+typedef struct {
+  uint16_t lex_state;
+  uint16_t external_lex_state;
+} TSLexMode;
+
+typedef struct {
+  uint16_t lex_state;
+  uint16_t external_lex_state;
+  uint16_t reserved_word_set_id;
+} TSLexerMode;
+
+typedef union {
+  TSParseAction action;
+  struct {
+    uint8_t count;
+    bool reusable;
+  } entry;
+} TSParseActionEntry;
+
+typedef struct {
+  int32_t start;
+  int32_t end;
+} TSCharacterRange;
+
+struct TSLanguage {
+  uint32_t abi_version;
+  uint32_t symbol_count;
+  uint32_t alias_count;
+  uint32_t token_count;
+  uint32_t external_token_count;
+  uint32_t state_count;
+  uint32_t large_state_count;
+  uint32_t production_id_count;
+  uint32_t field_count;
+  uint16_t max_alias_sequence_length;
+  const uint16_t *parse_table;
+  const uint16_t *small_parse_table;
+  const uint32_t *small_parse_table_map;
+  const TSParseActionEntry *parse_actions;
+  const char * const *symbol_names;
+  const char * const *field_names;
+  const TSMapSlice *field_map_slices;
+  const TSFieldMapEntry *field_map_entries;
+  const TSSymbolMetadata *symbol_metadata;
+  const TSSymbol *public_symbol_map;
+  const uint16_t *alias_map;
+  const TSSymbol *alias_sequences;
+  const TSLexerMode *lex_modes;
+  bool (*lex_fn)(TSLexer *, TSStateId);
+  bool (*keyword_lex_fn)(TSLexer *, TSStateId);
+  TSSymbol keyword_capture_token;
+  struct {
+    const bool *states;
+    const TSSymbol *symbol_map;
+    void *(*create)(void);
+    void (*destroy)(void *);
+    bool (*scan)(void *, TSLexer *, const bool *symbol_whitelist);
+    unsigned (*serialize)(void *, char *);
+    void (*deserialize)(void *, const char *, unsigned);
+  } external_scanner;
+  const TSStateId *primary_state_ids;
+  const char *name;
+  const TSSymbol *reserved_words;
+  uint16_t max_reserved_word_set_size;
+  uint32_t supertype_count;
+  const TSSymbol *supertype_symbols;
+  const TSMapSlice *supertype_map_slices;
+  const TSSymbol *supertype_map_entries;
+  TSLanguageMetadata metadata;
+};
+
+static inline bool set_contains(const TSCharacterRange *ranges, uint32_t len, int32_t lookahead) {  // binary search; assumes `ranges` is sorted and non-overlapping
+  uint32_t index = 0;
+  uint32_t size = len - index;
+  while (size > 1) {
+    uint32_t half_size = size / 2;
+    uint32_t mid_index = index + half_size;
+    const TSCharacterRange *range = &ranges[mid_index];
+    if (lookahead >= range->start && lookahead <= range->end) {
+      return true;
+    } else if (lookahead > range->end) {
+      index = mid_index;
+    }
+    size -= half_size;
+  }
+  const TSCharacterRange *range = &ranges[index];
+  return (lookahead >= range->start && lookahead <= range->end);
+}
+
+/*
+ *  Lexer Macros
+ */
+
+#ifdef _MSC_VER
+#define UNUSED __pragma(warning(suppress : 4101))
+#else
+#define UNUSED __attribute__((unused))
+#endif
+
+#define START_LEXER()           \
+  bool result = false;          \
+  bool skip = false;            \
+  UNUSED                        \
+  bool eof = false;             \
+  int32_t lookahead;            \
+  goto start;                   \
+  next_state:                   \
+  lexer->advance(lexer, skip);  \
+  start:                        \
+  skip = false;                 \
+  lookahead = lexer->lookahead;
+
+#define ADVANCE(state_value) \
+  {                          \
+    state = state_value;     \
+    goto next_state;         \
+  }
+
+#define ADVANCE_MAP(...)                                              \
+  {                                                                   \
+    static const uint16_t map[] = { __VA_ARGS__ };                    \
+    for (uint32_t i = 0; i < sizeof(map) / sizeof(map[0]); i += 2) {  \
+      if (map[i] == lookahead) {                                      \
+        state = map[i + 1];                                           \
+        goto next_state;                                              \
+      }                                                               \
+    }                                                                 \
+  }
+
+#define SKIP(state_value) \
+  {                       \
+    skip = true;          \
+    state = state_value;  \
+    goto next_state;      \
+  }
+
+#define ACCEPT_TOKEN(symbol_value)     \
+  result = true;                       \
+  lexer->result_symbol = symbol_value; \
+  lexer->mark_end(lexer);
+
+#define END_STATE() return result;
+
+/*
+ *  Parse Table Macros
+ */
+
+#define SMALL_STATE(id) ((id) - LARGE_STATE_COUNT)
+
+#define STATE(id) id
+
+#define ACTIONS(id) id
+
+#define SHIFT(state_value)            \
+  {{                                  \
+    .shift = {                        \
+      .type = TSParseActionTypeShift, \
+      .state = (state_value)          \
+    }                                 \
+  }}
+
+#define SHIFT_REPEAT(state_value)     \
+  {{                                  \
+    .shift = {                        \
+      .type = TSParseActionTypeShift, \
+      .state = (state_value),         \
+      .repetition = true              \
+    }                                 \
+  }}
+
+#define SHIFT_EXTRA()                 \
+  {{                                  \
+    .shift = {                        \
+      .type = TSParseActionTypeShift, \
+      .extra = true                   \
+    }                                 \
+  }}
+
+#define REDUCE(symbol_name, children, precedence, prod_id) \
+  {{                                                       \
+    .reduce = {                                            \
+      .type = TSParseActionTypeReduce,                     \
+      .symbol = symbol_name,                               \
+      .child_count = children,                             \
+      .dynamic_precedence = precedence,                    \
+      .production_id = prod_id                             \
+    },                                                     \
+  }}
+
+#define RECOVER()                    \
+  {{                                 \
+    .type = TSParseActionTypeRecover \
+  }}
+
+#define ACCEPT_INPUT()              \
+  {{                                \
+    .type = TSParseActionTypeAccept \
+  }}
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif  // TREE_SITTER_PARSER_H_