From dd8022c19eb9c1ea6117069ced1517c28256b64e Mon Sep 17 00:00:00 2001
From: colinmcneil
Date: Mon, 9 Dec 2024 12:14:59 -0500
Subject: [PATCH 1/2] Remove unnecessary `entering tool`

---
 src/commands/runPrompt.ts | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/src/commands/runPrompt.ts b/src/commands/runPrompt.ts
index 98df8fc..3bad00a 100644
--- a/src/commands/runPrompt.ts
+++ b/src/commands/runPrompt.ts
@@ -149,7 +149,7 @@ export const runPrompt: (secrets: vscode.SecretStorage, mode: PromptOption) => v
                 await writeToEditor(`${header} ROLE ${role}${content ? ` (${content})` : ''}\n\n`);
                 break;
             case 'functions-done':
-                await writeToEditor('\n```' + `\n\n*entering tool*\n\n`);
+                await writeToEditor('\n```\n');
                 break;
             case 'message':
                 await writeToEditor(json.params.content);
@@ -169,12 +169,12 @@ export const runPrompt: (secrets: vscode.SecretStorage, mode: PromptOption) => v
                 await writeToEditor(json.params.messages.map((m: any) => `# ${m.role}\n${m.content}`).join('\n') + '\n');
                 break;
             case 'error':
-                const errorMSG = String(json.params.content) + String(json.params.message) + String(json.params.message)
+                const errorMSG = String(json.params.content) || String(json.params.message) || String(json.params.message)
                 await writeToEditor('```error\n' + errorMSG + '\n```\n');
                 postToBackendSocket({ event: 'eventLabsPromptError', properties: { error: errorMSG } });
                 break;
             default:
-                await writeToEditor(JSON.stringify(json, null, 2));
+                break;
         }
     }, token);
     await doc.save();

From 3540aee8d55217177e64fb70b077bcacddacc5ab Mon Sep 17 00:00:00 2001
From: colinmcneil
Date: Tue, 17 Dec 2024 09:15:52 -0500
Subject: [PATCH 2/2] Agnostic model provider secrets

---
 README.md                    |  3 +-
 package.json                 |  6 ++--
 src/commands/runPrompt.ts    | 25 +++++++------
 src/commands/secrets.ts      | 68 ++++++++++++++++++++++++++++++++++++
 src/commands/setOpenAIKey.ts | 58 ------------------------------
 src/extension.ts             |  8 ++---
 src/modelproviders.json      | 18 ++++++++++
 src/utils/promptRunner.ts    | 19 ++++++----
 tsconfig.json                |  1 +
 9 files changed, 122 insertions(+), 84 deletions(-)
 create mode 100644 src/commands/secrets.ts
 delete mode 100644 src/commands/setOpenAIKey.ts
 create mode 100644 src/modelproviders.json

diff --git a/README.md b/README.md
index 275faef..fd50188 100644
--- a/README.md
+++ b/README.md
@@ -14,8 +14,7 @@ This project is a research prototype. It is ready to try and will give results f
 *Docker internal users: You must be opted-out of mandatory sign-in.*
 
 1. Install latest VSIX file https://github.com/docker/labs-ai-tools-vscode/releases
-2. Execute command `>Docker AI: Set OpenAI API key...` and enter your OpenAI secret key.
-   You can run a prompt with a local model. Docs coming soon.
+2. Execute command `>Docker AI: Set Secret Key...` to enter the API key for your model provider. This step is optional if your prompt specifies a local model via `url:` and `model:` attributes.
 3. Run a prompt
 
 ### Local Prompt:
diff --git a/package.json b/package.json
index 3685955..25b47ce 100644
--- a/package.json
+++ b/package.json
@@ -2,7 +2,7 @@
   "name": "labs-ai-tools-vscode",
   "displayName": "Labs: AI Tools for VSCode",
   "description": "Run & Debug AI Prompts with Dockerized tools",
-  "version": "0.1.9",
+  "version": "0.1.10",
   "publisher": "docker",
   "repository": {
     "type": "git",
@@ -57,8 +57,8 @@
         "title": "Docker AI: Run markdown commands"
       },
       {
-        "command": "docker.labs-ai-tools-vscode.set-openai-api-key",
-        "title": "Docker AI: Set OpenAI API key"
+        "command": "docker.labs-ai-tools-vscode.set-secret",
+        "title": "Docker AI: Set Secret Key"
       },
       {
         "command": "docker.labs-ai-tools-vscode.save-prompt",
diff --git a/src/commands/runPrompt.ts b/src/commands/runPrompt.ts
index 3bad00a..8d18019 100644
--- a/src/commands/runPrompt.ts
+++ b/src/commands/runPrompt.ts
@@ -5,13 +5,14 @@ import * as vscode from "vscode";
 import { showPromptPicker } from "../utils/promptPicker";
 import { createOutputBuffer } from "../utils/promptFilename";
 import { spawnPromptImage, writeKeyToVolume } from "../utils/promptRunner";
-import { verifyHasOpenAIKey } from "./setOpenAIKey";
 import { getCredential } from "../utils/credential";
 import { setProjectDir } from "./setProjectDir";
 import { postToBackendSocket } from "../utils/ddSocket";
 import { extensionOutput } from "../extension";
 import { randomUUID } from "crypto";
 
+const modelProviders = require('../modelproviders.json') as { label: string, id: string, file: string, patterns: string[] }[];
+
 type PromptOption = 'local-dir' | 'local-file' | 'remote';
 
 const getWorkspaceFolder = async () => {
@@ -42,12 +43,6 @@
 export const runPrompt: (secrets: vscode.SecretStorage, mode: PromptOption) => void = (secrets: vscode.SecretStorage, mode: PromptOption) => vscode.window.withProgress({ location: vscode.ProgressLocation.Window, cancellable: true }, async (progress, token) => {
     progress.report({ increment: 1, message: "Starting..." });
     postToBackendSocket({ event: 'eventLabsPromptRunPrepare', properties: { mode } });
-    progress.report({ increment: 5, message: "Checking for OpenAI key..." });
-
-    const hasOpenAIKey = await verifyHasOpenAIKey(secrets, true);
-    if (!hasOpenAIKey) {
-        return;
-    }
 
     progress.report({ increment: 5, message: "Checking for workspace..." });
 
@@ -90,8 +85,6 @@ export const runPrompt: (secrets: vscode.SecretStorage, mode: PromptOption) => v
 
     progress.report({ increment: 5, message: "Writing prompt output file..." });
 
-    const apiKey = await secrets.get("openAIKey");
-
     const { editor, doc } = await createOutputBuffer('prompt-output' + randomUUID() + '.md', hostDir);
 
     if (!editor || !doc) {
@@ -118,7 +111,19 @@ export const runPrompt: (secrets: vscode.SecretStorage, mode: PromptOption) => v
 
     try {
         progress.report({ increment: 5, message: "Mounting secrets..." });
-        await writeKeyToVolume(apiKey!);
+        for (const provider of modelProviders) {
+            const secret = await secrets.get(provider.id);
+            if (secret) {
+                await writeKeyToVolume(provider.file, secret);
+            }
+            if (provider.id === 'openai' && !secret) {
+                const oldOpenAIKey = await secrets.get('openAIKey');
+                if (oldOpenAIKey) {
+                    await writeKeyToVolume(provider.file, oldOpenAIKey);
+                }
+
+            }
+        }
         progress.report({ increment: 5, message: "Running..." });
         const ranges: Record = {};
         const getBaseFunctionRange = () => new vscode.Range(doc.lineCount, 0, doc.lineCount, 0);
diff --git a/src/commands/secrets.ts b/src/commands/secrets.ts
new file mode 100644
index 0000000..7c74229
--- /dev/null
+++ b/src/commands/secrets.ts
@@ -0,0 +1,68 @@
+import { SecretStorage, ThemeIcon, window } from "vscode";
+
+export const showSetSecretDialog = async (secrets: SecretStorage) => {
+    const modelProviders = require('../modelproviders.json') as { label: string, id: string, patterns: string[] }[];
+
+    type QuickPickItem = {
+        label: string;
+        id: string;
+        buttons: {
+            iconPath: ThemeIcon;
+            tooltip: string;
+            onClick: () => void;
+        }[];
+    };
+
+    const quickPick = window.createQuickPick();
+
+
+    quickPick.items = modelProviders.map(provider => ({
+        label: provider.label,
+        id: provider.id,
+        buttons: [{
+            iconPath: new ThemeIcon('trashcan'),
+            tooltip: 'Clear', onClick: () => {
+                secrets.delete(provider.id);
+                void window.showInformationMessage(`${provider.label} key cleared.`);
+            }
+        }]
+    }));
+
+    const modelProvider = await new Promise((resolve) => {
+        quickPick.onDidAccept(() => {
+            resolve(quickPick.selectedItems[0]);
+            quickPick.hide();
+        });
+        quickPick.onDidHide(() => {
+            resolve(undefined);
+        });
+        quickPick.onDidTriggerItemButton((event) => {
+            secrets.delete(event.item.id);
+            void window.showInformationMessage(`${event.item.label} key cleared.`);
+            resolve(undefined);
+            quickPick.hide();
+        });
+        quickPick.show();
+    });
+
+    if (!modelProvider) {
+        return;
+    }
+
+    const secret = await window.showInputBox({
+        title: `Enter your ${modelProvider.label} API key`,
+        password: true,
+        prompt: `Enter your ${modelProvider.label} API key`,
+        ignoreFocusOut: true,
+    });
+
+    if (!secret) {
+        return void window.showInformationMessage(`${modelProvider.label} key not set.`);
+    }
+
+
+    await secrets.store(modelProvider.id, secret);
+    void window.showInformationMessage(`${modelProvider.label} key set.`);
+
+    return modelProvider.id;
+};
\ No newline at end of file
diff --git a/src/commands/setOpenAIKey.ts b/src/commands/setOpenAIKey.ts
deleted file mode 100644
index bb4d55d..0000000
--- a/src/commands/setOpenAIKey.ts
+++ /dev/null
@@ -1,58 +0,0 @@
-import { SecretStorage, window } from "vscode";
-
-const setKey = async (secrets: SecretStorage) => {
-    const key = await window.showInputBox({
-        title: "OpenAI API Key",
-        password: true,
-        prompt: "Enter your OpenAI API key",
-        ignoreFocusOut: true,
-    });
-    if (!key) {
-        // return;
-        await secrets.delete('openAIKey');
-        return;
-    }
-    await secrets.store('openAIKey', key);
-    void window.showInformationMessage("OpenAI key set.");
-};
-
-export const setOpenAIKey = async (secrets: SecretStorage, skipQuickPick: boolean = false) => {
-    if (skipQuickPick) {
-        await setKey(secrets);
-        return;
-    }
-
-    const option = await window.showQuickPick([{ label: "Set key" }, { label: "Delete key" }], {
-        ignoreFocusOut: true,
-    });
-    if (!option) {
-        return;
-    }
-    if (option.label === "Set key") {
-        await setKey(secrets);
-    } else {
-        await secrets.delete('openAIKey');
-        window.showInformationMessage('OpenAI key deleted');
-    }
-
-};
-
-
-export const verifyHasOpenAIKey = async (secrets: SecretStorage, didRunAutomatically = false) => {
-    const openAIKey = await secrets.get('openAIKey');
-    if (!openAIKey) {
-        return await window.showErrorMessage('No OpenAI API key found. Please set one or use a dummy key for Ollama.', {
-            modal: didRunAutomatically
-        }, 'Set Key',).then(
-            async (res) => {
-                if (res === 'Set Key') {
-                    await setOpenAIKey(secrets, true);
-                    return true;
-                }
-                else {
-                    return false;
-                }
-            });
-    }
-    return true;
-};
\ No newline at end of file
diff --git a/src/extension.ts b/src/extension.ts
index e3523a8..c79c59b 100644
--- a/src/extension.ts
+++ b/src/extension.ts
@@ -1,5 +1,5 @@
 import * as vscode from 'vscode';
-import { setOpenAIKey } from './commands/setOpenAIKey';
+import { showSetSecretDialog } from './commands/secrets';
 import { nativeClient } from './utils/lsp';
 import { spawn, spawnSync } from 'child_process';
 import semver from 'semver';
@@ -84,10 +84,10 @@ export async function activate(context: vscode.ExtensionContext) {
     setDefaultProperties(context);
     postToBackendSocket({ event: 'eventLabsPromptActivated' });
     ctx = context;
-    let setOpenAIKeyCommand = vscode.commands.registerCommand('docker.labs-ai-tools-vscode.set-openai-api-key', () => {
-        setOpenAIKey(context.secrets);
+    let setProviderSecretCommand = vscode.commands.registerCommand('docker.labs-ai-tools-vscode.set-secret', () => {
+        showSetSecretDialog(context.secrets);
     });
-    context.subscriptions.push(setOpenAIKeyCommand);
+    context.subscriptions.push(setProviderSecretCommand);
 
     const pullPromptImage = () => {
         const process = spawn('docker', ['pull', "vonwig/prompts:latest"]);
diff --git a/src/modelproviders.json b/src/modelproviders.json
new file mode 100644
index 0000000..67a3343
--- /dev/null
+++ b/src/modelproviders.json
@@ -0,0 +1,18 @@
+[
+  {
+    "label": "Anthropic Claude",
+    "id": "anthropic",
+    "file": ".claude-api-key",
+    "patterns": [
+      "claude-*"
+    ]
+  },
+  {
+    "label": "OpenAI",
+    "id": "openai",
+    "file": ".openai-api-key",
+    "patterns": [
+      "gpt-*"
+    ]
+  }
+]
\ No newline at end of file
diff --git a/src/utils/promptRunner.ts b/src/utils/promptRunner.ts
index 271b30a..821091b 100644
--- a/src/utils/promptRunner.ts
+++ b/src/utils/promptRunner.ts
@@ -5,6 +5,7 @@ import { notifications } from "./notifications";
 import { extensionOutput } from "../extension";
 import * as rpc from 'vscode-jsonrpc/node';
 import path from "path";
+import modelProviders from "../modelproviders.json";
 
 const activePrompts: { [key: string]: Function } = {};
 
@@ -34,9 +35,10 @@ export const getRunArgs = async (promptRef: string, projectDir: string, username
         'run',
         '--rm',
         '-v', '/var/run/docker.sock:/var/run/docker.sock',
-        '-v', 'openai_key:/secret',
+        '-v', 'docker-vsc-secrets:/root/secrets',
+        '-e', 'OPENAI_API_KEY_LOCATION=/root/secrets',
+        '-e', 'CLAUDE_API_KEY_LOCATION=/root/secrets',
         '--mount', 'type=volume,source=docker-prompts,target=/prompts',
-        '-e', 'OPENAI_API_KEY_LOCATION=/secret',
         '-v', "/run/host-services/backend.sock:/host-services/docker-desktop-backend.sock",
         '-e', "DOCKER_DESKTOP_SOCKET_PATH=/host-services/docker-desktop-backend.sock",
     ];
@@ -122,22 +124,25 @@ const getJSONArgForPlatform = (json: object) => {
     }
 }
 
-export const writeKeyToVolume = async (key: string) => {
+export const writeKeyToVolume = async (keyFile: string, keyVal: string) => {
 
     const args1 = ["pull", "vonwig/function_write_files"];
 
     const args2 = [
         "run",
-        "-v", "openai_key:/secret",
+        "-v", `docker-vsc-secrets:/secret`,
         "--rm",
         "--workdir", "/secret",
         "vonwig/function_write_files",
-        getJSONArgForPlatform({ files: [{ path: ".openai-api-key", content: key, executable: false }] })
+        getJSONArgForPlatform({ files: [{ path: keyFile, content: keyVal, executable: false }] })
     ];
 
     extensionOutput.appendLine(JSON.stringify({
-        "write-open-ai-key-to-volume": {
-            args1, args2
+        "write-secret-to-volume": {
+            keyFile,
+            keyVal,
+            args1,
+            args2
         }
     }));
 
diff --git a/tsconfig.json b/tsconfig.json
index 452f7d3..0fcd382 100644
--- a/tsconfig.json
+++ b/tsconfig.json
@@ -18,6 +18,7 @@
   "include": [
     "src/promptgrammar.json",
     "src/promptmetadatagrammar.json",
+    "src/modelproviders.json",
     "src/**/*.ts"
   ]
 }