author | elioat <elioat@tilde.institute> | 2025-01-05 08:11:02 -0500 |
---|---|---|
committer | elioat <elioat@tilde.institute> | 2025-01-05 08:11:02 -0500 |
commit | e94262a4f6423cab4fa909a788a86a150a8829ac (patch) | |
tree | a7064d1989b599812be38b034a14a55c76ce6bcb | /html/matt-chat |
parent | 01eb4d24bfc0cad32bf2ee50d41a28f42254ba9c (diff) | |
download | tour-e94262a4f6423cab4fa909a788a86a150a8829ac.tar.gz |
*
Diffstat (limited to 'html/matt-chat')
-rw-r--r-- | html/matt-chat/index.html | 14 |
1 file changed, 7 insertions(+), 7 deletions(-)
diff --git a/html/matt-chat/index.html b/html/matt-chat/index.html
index 3ae641f..c9e44c0 100644
--- a/html/matt-chat/index.html
+++ b/html/matt-chat/index.html
@@ -179,9 +179,9 @@
         // The context window size is the number of previous exchanges to keep...
         // though this is relatively naive at the moment
         const config = {
-            apiUrl: "http://localhost:11434/v1/chat/completions",
-            API_URL: "http://localhost:11434/v1",
-            API_MODELS_ENDPOINT: "http://localhost:11434/v1/models",
+            apiUrl: "http://localhost:11434/v1",
+            completionsEndpoint: "http://localhost:11434/v1/chat/completions",
+            modelsEndpoint: "http://localhost:11434/v1/models",
             contextWindowSize: 6, // Number of previous exchanges to remember
             systemMessage: "You are a helpful assistant. If you don't know something you'll let me know. Your name is Matt.", // Set the mood and personality for the LLM's responses
             maxTokens: 4096, // Approximate max tokens for most models
@@ -196,7 +196,7 @@
         let isCatMode = false; // Flag to track cat mode

-        const API_MODELS_ENDPOINT = config.API_MODELS_ENDPOINT;
+        const API_MODELS_ENDPOINT = config.modelsEndpoint;

         // Function to handle errors
         function handleError(message) {
@@ -218,7 +218,7 @@
             const modelIds = [];
             try {
-                const response = await fetch(config.API_MODELS_ENDPOINT);
+                const response = await fetch(config.modelsEndpoint);
                 if (!response.ok) throw new Error('Failed to fetch models');

                 const data = await response.json();
@@ -387,7 +387,7 @@
             // Prepare the messages for the API
             const messagesToSend = await prepareMessages(userMessage);

-            const response = await fetch(config.apiUrl, {
+            const response = await fetch(config.completionsEndpoint, {
                 method: "POST",
                 headers: {
                     "Content-Type": "application/json",
@@ -532,7 +532,7 @@ Available commands:\n
             const modelSelect = document.getElementById("model-select");
             const selectedModel = modelSelect.value;

-            const response = await fetch(config.apiUrl, {
+            const response = await fetch(config.completionsEndpoint, {
                 method: "POST",
                 headers: {
                     "Content-Type": "application/json",
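
For reference, a minimal sketch of how the renamed config fields are consumed after this change, assuming an OpenAI-compatible Ollama server at localhost:11434 as configured above. The helper names (`listModels`, `askMatt`) and the request/response payload shapes are illustrative assumptions, not part of this diff.

```javascript
// Sketch only: exercises the renamed config keys from the patched index.html.
const config = {
  apiUrl: "http://localhost:11434/v1",                               // base URL
  completionsEndpoint: "http://localhost:11434/v1/chat/completions", // chat completions
  modelsEndpoint: "http://localhost:11434/v1/models",                // model listing
};

// Mirrors the patched fetch(config.modelsEndpoint) call.
async function listModels() {
  const response = await fetch(config.modelsEndpoint);
  if (!response.ok) throw new Error("Failed to fetch models");
  const data = await response.json();
  return data.data.map((m) => m.id); // OpenAI-style list payload (assumed shape)
}

// Mirrors the patched fetch(config.completionsEndpoint, { method: "POST", ... }) calls;
// the JSON body is the standard OpenAI-compatible payload, which the diff does not show.
async function askMatt(model, userMessage) {
  const response = await fetch(config.completionsEndpoint, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({
      model,
      messages: [{ role: "user", content: userMessage }],
    }),
  });
  const data = await response.json();
  return data.choices[0].message.content; // assumed OpenAI-style response shape
}
```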