diff --git a/manifest.json b/manifest.json index 27dcff3..b3249aa 100644 --- a/manifest.json +++ b/manifest.json @@ -1,7 +1,7 @@ { "manifest_version": 2, "name": "SpaceLLama", - "version": "1.4", + "version": "1.5", "description": "Summarize web pages using Ollama. Supports custom models, token limits, system prompts, chunking, and more. See https://github.com/tcsenpai/spacellama for more information.", "permissions": ["activeTab", "storage", "", "tabs"], "browser_action": { diff --git a/options/options.html b/options/options.html index b344235..b19b715 100644 --- a/options/options.html +++ b/options/options.html @@ -23,11 +23,16 @@
- +
- +
diff --git a/options/options.js b/options/options.js index b6df426..b6733f7 100644 --- a/options/options.js +++ b/options/options.js @@ -1,5 +1,10 @@ -let browser = (typeof chrome !== 'undefined') ? chrome : (typeof browser !== 'undefined') ? browser : null; - +let browser = + typeof chrome !== "undefined" + ? chrome + : typeof browser !== "undefined" + ? browser + : null; + async function validateEndpoint(endpoint) { try { const response = await fetch(`${endpoint}/api/tags`); @@ -25,7 +30,7 @@ async function updateTokenLimit() { if (model in modelTokens) { tokenLimitInput.value = modelTokens[model]; } else { - tokenLimitInput.value = 4000; // Default value, modified from 4096 to meet even requirement + tokenLimitInput.value = 16384; // Default value } } catch (error) { console.error("Error updating token limit:", error.message || error); @@ -34,7 +39,7 @@ async function loadModelTokens() { try { - const response = await fetch(browser.runtime.getURL('model_tokens.json')); + const response = await fetch(browser.runtime.getURL("model_tokens.json")); return await response.json(); } catch (error) { console.error("Error loading model tokens:", error.message || error); @@ -47,7 +52,7 @@ async function saveOptions(e) { const model = document.getElementById("model").value; const systemPrompt = document.getElementById("system-prompt").value; const status = document.getElementById("status"); - const tokenLimit = document.getElementById("token-limit").value || 4096; + const tokenLimit = document.getElementById("token-limit").value || 16384; // Ensure the endpoint doesn't end with /api/generate const cleanEndpoint = endpoint.replace(/\/api\/generate\/?$/, ""); status.textContent = "Validating endpoint..."; @@ -67,7 +72,7 @@ async function saveOptions(e) { }, 2000); } else { status.textContent = - "Invalid endpoint. Please check the URL and try again."; + "Invalid endpoint. Please check the URL and try again."; } } catch (error) { console.error("Error saving options:", error.message || error); @@ -76,33 +81,41 @@ } function restoreOptions() { - browser.storage.local.get({ - ollamaEndpoint: "http://localhost:11434", - ollamaModel: "llama2", - systemPrompt: "You are a helpful AI assistant. Summarize the given text concisely.", - tokenLimit: 4096 - }, function(result) { - document.getElementById("endpoint").value = result.ollamaEndpoint || "http://localhost:11434"; - document.getElementById("model").value = result.ollamaModel || "llama2"; - document.getElementById("system-prompt").value = result.systemPrompt || "You are a helpful AI assistant. Summarize the given text concisely."; + browser.storage.local.get( + { + ollamaEndpoint: "http://localhost:11434", + ollamaModel: "llama3.1:latest", + systemPrompt: + "You are a helpful AI assistant. Summarize the given text concisely.", + tokenLimit: 16384, + }, + function (result) { + document.getElementById("endpoint").value = + result.ollamaEndpoint || "http://localhost:11434"; + document.getElementById("model").value = + result.ollamaModel || "llama3.1:latest"; + document.getElementById("system-prompt").value = + result.systemPrompt || + "You are a helpful AI assistant. Summarize the given text concisely."; - // Call to updateTokenLimit remains async - updateTokenLimit().then(() => { - validateEndpoint(result.ollamaEndpoint).then(isValid => { - updateEndpointStatus(isValid); + // Call to updateTokenLimit remains async + updateTokenLimit().then(() => { + validateEndpoint(result.ollamaEndpoint).then((isValid) => { + updateEndpointStatus(isValid); + }); }); - }); - }); + } + ); } document.addEventListener("DOMContentLoaded", restoreOptions); document .getElementById("settings-form") .addEventListener("submit", saveOptions); document.getElementById("endpoint").addEventListener("blur", async (e) => { -const isValid = await validateEndpoint(e.target.value); -updateEndpointStatus(isValid); + const isValid = await validateEndpoint(e.target.value); + updateEndpointStatus(isValid); -document.getElementById("model").addEventListener("change", updateTokenLimit); -}); \ No newline at end of file + document.getElementById("model").addEventListener("change", updateTokenLimit); +});