diff --git a/src/extension.ts b/src/extension.ts
index 24fc213..43427aa 100644
--- a/src/extension.ts
+++ b/src/extension.ts
@@ -9,7 +9,7 @@ export async function activate(context: ExtensionContext) {
   const setupGraniteCmd = commands.registerCommand("vscode-granite.setup", async () => {
     await Telemetry.send("granite.commands.setup");
     await ollamaLibraryWarmup(DOWNLOADABLE_MODELS);
-    SetupGranitePage.render(context.extensionUri, context.extensionMode);
+    SetupGranitePage.render(context);
   });
   context.subscriptions.push(setupGraniteCmd);
   const hasRunBefore = context.globalState.get('hasRunSetup', false);
diff --git a/src/modelServer.ts b/src/modelServer.ts
index 3504a4f..68bb6c3 100644
--- a/src/modelServer.ts
+++ b/src/modelServer.ts
@@ -8,7 +8,7 @@ export interface IModelServer {
   installServer(mode: string): Promise<boolean>;
   getModelStatus(modelName?: string): Promise<ModelStatus>;
   installModel(modelName: string, reportProgress: (progress: ProgressData) => void): Promise<void>;
-  supportedInstallModes(): Promise<{ id: string; label: string }[]>; //manual, script, homebrew
+  supportedInstallModes(): Promise<{ id: string; label: string, supportsRefresh: boolean }[]>; //manual, script, homebrew
   configureAssistant(
     chatModelName: string | null,
     tabModelName: string | null,
diff --git a/src/ollama/mockServer.ts b/src/ollama/mockServer.ts
index d6ccc41..beadd10 100644
--- a/src/ollama/mockServer.ts
+++ b/src/ollama/mockServer.ts
@@ -1,5 +1,5 @@
 //Mock server for testing
-import { CancellationError, env, Progress, ProgressLocation, Uri, window } from "vscode";
+import { CancellationError, env, ExtensionContext, Progress, ProgressLocation, Uri, window } from "vscode";
 import { getStandardName } from "../commons/naming";
 import { ProgressData } from "../commons/progressData";
 import { ModelStatus, ServerStatus } from "../commons/statuses";
@@ -50,7 +50,7 @@ export class MockServer extends OllamaServer implements IModelServer {
  * will simulate download operations.
  */
  constructor(private speed: number) {
-    super("Mock Server");
+    super({} as ExtensionContext, "Mock Server");
     this.speed *= 1024 * 1024; // Convert speed to bytes per second
   }
   async startServer(): Promise<boolean> {
@@ -218,8 +218,8 @@ export class MockServer extends OllamaServer implements IModelServer {
     });
   }
 
-  async supportedInstallModes(): Promise<{ id: string; label: string; }[]> {
-    return Promise.resolve([{ id: 'mock', label: 'Install Magically' }, { id: 'manual', label: 'Install Manually' }]);
+  async supportedInstallModes(): Promise<{ id: string; label: string; supportsRefresh: boolean }[]> {
+    return Promise.resolve([{ id: 'mock', label: 'Install Magically', supportsRefresh: true }, { id: 'manual', label: 'Install Manually', supportsRefresh: true }]);
   }
 
   async listModels(): Promise<string[]> {
diff --git a/src/ollama/ollamaServer.ts b/src/ollama/ollamaServer.ts
index 97a2e63..7454027 100644
--- a/src/ollama/ollamaServer.ts
+++ b/src/ollama/ollamaServer.ts
@@ -1,5 +1,6 @@
 import os from "os";
-import { CancellationError, env, Progress, ProgressLocation, Uri, window } from "vscode";
+import path from 'path';
+import { CancellationError, env, ExtensionContext, Progress, ProgressLocation, Uri, window } from "vscode";
 import { DEFAULT_MODEL_INFO, ModelInfo } from "../commons/modelInfo";
 import { getStandardName } from "../commons/naming";
 import { ProgressData } from "../commons/progressData";
@@ -16,24 +17,28 @@ export class OllamaServer implements IModelServer {
   private currentStatus = ServerStatus.unknown;
   protected installingModels = new Set<string>();
 
-  constructor(private name: string = "Ollama", private serverUrl = "http://localhost:11434") { }
+  constructor(private context: ExtensionContext, private name: string = "Ollama", private serverUrl = "http://localhost:11434") { }
 
   getName(): string {
     return this.name;
   }
 
-  async supportedInstallModes(): Promise<{ id: string; label: string }[]> {
+  async supportedInstallModes(): Promise<{ id: string; label: string, supportsRefresh: boolean }[]> {
     const modes = [];
-
+    if (isLinux()) {
+      if (isDevspaces()) {
+        // sudo is not available in devspaces, so we can't use ollama's or manual install script
+        return [{ id: "devspaces", label: "See Red Hat Dev Spaces instructions", supportsRefresh: false }];
+      } else {
+        // on linux
+        modes.push({ id: "script", label: "Install with script", supportsRefresh: true });
+      }
+    }
     if (await isHomebrewAvailable()) {
       // homebrew is available
-      modes.push({ id: "homebrew", label: "Install with Homebrew" });
+      modes.push({ id: "homebrew", label: "Install with Homebrew", supportsRefresh: true });
     }
-    if (isLinux()) {
-      // on linux
-      modes.push({ id: "script", label: "Install with script" });
-    }
-    modes.push({ id: "manual", label: "Install manually" });
+    modes.push({ id: "manual", label: "Install manually", supportsRefresh: true });
     return modes;
   }
 
@@ -89,31 +94,48 @@ export class OllamaServer implements IModelServer {
   }
   async installServer(mode: string): Promise<boolean> {
+    let installCommand: string | undefined;
     switch (mode) {
+      case "devspaces": {
+        env.openExternal(Uri.parse("https://developers.redhat.com/articles/2024/08/12/integrate-private-ai-coding-assistant-ollama"));
+        return false;
+      }
       case "homebrew": {
         this.currentStatus = ServerStatus.installing;
         //We need to detect the terminal output to know when installation stopped (successfully or not)
-        await terminalCommandRunner.runInTerminal(
-          "clear && brew install --cask ollama && sleep 3 && ollama list", //run ollama list to trigger the ollama daemon
-          {
-            name: "Granite Code Setup",
-            show: true,
-          }
-        );
-        return true;
+        installCommand = [
+          'clear',
+          'set -e', // Exit immediately if a command exits with a non-zero status
+          'brew install --cask ollama',
+          'sleep 3',
+          'ollama list', // run ollama list to start the server
+        ].join(' && ');
+        break;
       }
       case "script":
-        this.currentStatus = ServerStatus.installing;
-        await terminalCommandRunner.runInTerminal(//We need to detect the terminal output to know when installation stopped (successfully or not)
-          "clear && curl -fsSL https://ollama.com/install.sh | sh",
-          {
-            name: "Granite Code Setup",
-            show: true,
-          }
-        );
-        return true;
+        const start_ollama_sh = path.join(this.context.extensionPath, 'start_ollama.sh');
+        installCommand = [
+          'clear',
+          'set -e', // Exit immediately if a command exits with a non-zero status
+          'command -v curl >/dev/null 2>&1 || { echo >&2 "curl is required but not installed. Aborting."; exit 1; }',
+          'curl -fsSL https://ollama.com/install.sh | sh',
+          `chmod +x "${start_ollama_sh}"`, // Ensure the script is executable
+          `"${start_ollama_sh}"`, // Use quotes in case the path contains spaces
+        ].join(' && ');
+        break;
       case "manual":
       default:
         env.openExternal(Uri.parse("https://ollama.com/download"));
+        return true;
+    }
+    if (installCommand) {
+      this.currentStatus = ServerStatus.installing;
+      await terminalCommandRunner.runInTerminal(
+        installCommand,
+        {
+          name: "Granite Code Setup",
+          show: true,
+        }
+      );
     }
     return true;
   }
@@ -339,3 +361,8 @@ function isLinux(): boolean {
 function isWin(): boolean {
   return PLATFORM.startsWith("win");
 }
+function isDevspaces() {
+  //sudo is not available on Red Hat DevSpaces
+  return process.env['DEVWORKSPACE_ID'] !== undefined;
+}
+
diff --git a/src/panels/setupGranitePage.ts b/src/panels/setupGranitePage.ts
index d079579..3dc8f50 100644
--- a/src/panels/setupGranitePage.ts
+++ b/src/panels/setupGranitePage.ts
@@ -4,14 +4,14 @@ import {
   CancellationError,
   commands,
   Disposable,
-  ExtensionMode,
+  ExtensionContext,
   Uri,
   ViewColumn,
   Webview,
   WebviewPanel,
   window
 } from "vscode";
-import { DOWNLOADABLE_MODELS } from '../commons/constants';
+import { DOWNLOADABLE_MODELS, isDevMode } from '../commons/constants';
 import { ProgressData } from "../commons/progressData";
 import { ModelStatus, ServerStatus } from '../commons/statuses';
 import { IModelServer } from '../modelServer';
@@ -52,11 +52,11 @@ export class SetupGranitePage {
    * @param panel A reference to the webview panel
    * @param extensionUri The URI of the directory containing the extension
    */
-  private constructor(panel: WebviewPanel, extensionUri: Uri, extensionMode: ExtensionMode) {
+  private constructor(panel: WebviewPanel, context: ExtensionContext) {
     this._panel = panel;
     this.server = useMockServer ?
       new MockServer(300) :
-      new OllamaServer();
+      new OllamaServer(context);
     // Set an event listener to listen for when the panel is disposed (i.e. when the user closes
     // the panel or when the panel is closed programmatically)
     this._panel.onDidDispose(() => this.dispose(), null, this._disposables);
@@ -64,14 +64,14 @@ export class SetupGranitePage {
     // Set the HTML content for the webview panel
     this._panel.webview.html = this._getWebviewContent(
       this._panel.webview,
-      extensionUri
+      context.extensionUri
     );
 
     // Set an event listener to listen for messages passed from the webview context
     this._setWebviewMessageListener(this._panel.webview);
 
-    if (extensionMode === ExtensionMode.Development) {
-      this._setupFileWatcher(extensionUri);
+    if (isDevMode) {
+      this._setupFileWatcher(context.extensionUri);
     }
   }
 
@@ -115,12 +115,13 @@ export class SetupGranitePage {
    *
    * @param extensionUri The URI of the directory containing the extension.
    */
-  public static render(extensionUri: Uri, extensionMode: ExtensionMode) {
+  public static render(context: ExtensionContext) {
     if (SetupGranitePage.currentPanel) {
       // If the webview panel already exists reveal it
       SetupGranitePage.currentPanel._panel.reveal(ViewColumn.One);
     } else {
       // If a webview panel does not already exist create and show a new one
+      const extensionUri = context.extensionUri;
       const panel = window.createWebviewPanel(
         // Panel view type
         "modelSetup",
@@ -140,7 +141,7 @@ export class SetupGranitePage {
         }
       );
 
-      SetupGranitePage.currentPanel = new SetupGranitePage(panel, extensionUri, extensionMode);
+      SetupGranitePage.currentPanel = new SetupGranitePage(panel, context);
     }
   }
 
diff --git a/start_ollama.sh b/start_ollama.sh
new file mode 100755
index 0000000..9707f7e
--- /dev/null
+++ b/start_ollama.sh
@@ -0,0 +1,95 @@
+#!/bin/bash
+
+# Timeout for waiting for Ollama to start (in seconds)
+TIMEOUT=60
+
+# Function to check if a command is available
+command_exists() {
+  command -v "$1" >/dev/null 2>&1
+}
+
+# Function to check if we are in a container (GitHub Codespaces or similar)
+in_container() {
+  # Check if we're running in GitHub Codespaces by checking the environment variable
+  [[ -f /.dockerenv || -f /run/.containerenv ]]
+}
+
+# Function to start Ollama and check if it's running
+start_ollama() {
+  if [ "$1" = "service" ]; then
+    echo "Starting Ollama service..."
+    if in_container; then
+      # Use the service command in a container
+      service ollama start
+    else
+      sudo systemctl start ollama
+    fi
+  else
+    echo "Starting Ollama as a background process..."
+    nohup ollama serve >/dev/null 2>&1 &
+  fi
+
+  # Wait for Ollama to start (max TIMEOUT seconds)
+  for i in $(seq 1 $TIMEOUT); do
+    if curl -s --max-time 1 http://localhost:11434/api/version >/dev/null; then
+      echo "Ollama started successfully."
+      return 0
+    fi
+
+    # Display a progress indicator
+    printf "."
+    sleep 1
+  done
+
+  echo -e "\nFailed to start Ollama."
+  return 1
+}
+
+# Main logic
+if curl -s --max-time 1 http://localhost:11434/api/version >/dev/null; then
+  echo "Ollama already started."
+  exit 0
+fi
+
+if in_container; then
+  echo "Running in a container environment."
+  # Check if Ollama is running using the service command
+  if service --status-all 2>&1 | grep -q 'ollama'; then
+    if service ollama status >/dev/null 2>&1; then
+      echo "Ollama service is already running."
+      exit 0
+    else
+      echo "Starting Ollama service..."
+      if start_ollama service; then
+        exit 0
+      else
+        exit 1
+      fi
+    fi
+  fi
+else
+  if command_exists systemctl; then
+    if systemctl is-active --quiet ollama; then
+      echo "Ollama service is already running."
+      exit 0
+    elif systemctl list-unit-files ollama.service >/dev/null 2>&1; then
+      echo "Starting Ollama service..."
+      if start_ollama service; then
+        exit 0
+      else
+        exit 1
+      fi
+    fi
+  fi
+fi
+
+if command_exists ollama; then
+  if start_ollama; then
+    exit 0
+  else
+    exit 1
+  fi
+else
+  echo "Ollama is not installed or not in the PATH."
+  exit 1
+fi
diff --git a/webviews/src/App.tsx b/webviews/src/App.tsx
index 77740a9..e017cde 100644
--- a/webviews/src/App.tsx
+++ b/webviews/src/App.tsx
@@ -30,7 +30,7 @@ function App() {
 
   const [serverStatus, setServerStatus] = useState<ServerStatus>(ServerStatus.unknown);
   const [modelStatuses, setModelStatuses] = useState<Map<string, ModelStatus>>(new Map());
-  const [installationModes, setInstallationModes] = useState<{ id: string, label: string }[]>([]);
+  const [installationModes, setInstallationModes] = useState<{ id: string, label: string, supportsRefresh: boolean }[]>([]);
 
   const [enabled, setEnabled] = useState(true);
 
@@ -202,7 +202,9 @@ function App() {
           {/* New section for additional buttons */}
           {serverStatus === ServerStatus.missing && installationModes.length > 0 && (
-            <p>
-              This page will refresh once Ollama is installed.
-            </p>
+            {installationModes.some(mode => mode.supportsRefresh === true) && (
+              <p>
+                This page will refresh once Ollama is installed.
+              </p>
+            )}
             {installationModes.map((mode) => (