From 05879d4f9923e737a685776a68f527f752d56263 Mon Sep 17 00:00:00 2001
From: "Kartik K. Agaram"
Date: Thu, 29 Apr 2021 16:10:30 -0700
Subject: load large definitions

---
 shell/tokenize.mu | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/shell/tokenize.mu b/shell/tokenize.mu
index 91e12882..66d76718 100644
--- a/shell/tokenize.mu
+++ b/shell/tokenize.mu
@@ -15,7 +15,7 @@ fn tokenize in: (addr gap-buffer), out: (addr stream cell), trace: (addr trace)
     break-if-!=
     # initialize token data each iteration to avoid aliasing
     var dest-ah/eax: (addr handle stream byte) <- get token, text-data
-    populate-stream dest-ah, 0x40/max-token-size
+    populate-stream dest-ah, 0x100/max-definition-size
     #
     next-token in, token, trace
     var error?/eax: boolean <- has-errors? trace
--
cgit 1.4.1-2-gfad0