Merge pull request #57 from docker/cm/anthropic-provider
Model Provider Secrets
ColinMcNeil authored Dec 17, 2024
2 parents 8ec969b + 3540aee commit 3c6ab1a
Showing 9 changed files with 125 additions and 87 deletions.
3 changes: 1 addition & 2 deletions README.md
@@ -14,8 +14,7 @@ This project is a research prototype. It is ready to try and will give results f
*Docker internal users: You must be opted-out of mandatory sign-in.*

1. Install latest VSIX file https://github.com/docker/labs-ai-tools-vscode/releases
2. Execute command `>Docker AI: Set OpenAI API key...` and enter your OpenAI secret key.
You can run a prompt with a local model. Docs coming soon.
2. Execute command `>Docker AI: Set Secret Key...` to enter the API key for your model provider. This step is optional if your prompt specifies a local model via `url:` and `model:` attributes.
3. Run a prompt

### Local Prompt:
6 changes: 3 additions & 3 deletions package.json
@@ -2,7 +2,7 @@
"name": "labs-ai-tools-vscode",
"displayName": "Labs: AI Tools for VSCode",
"description": "Run & Debug AI Prompts with Dockerized tools",
"version": "0.1.9",
"version": "0.1.10",
"publisher": "docker",
"repository": {
"type": "git",
@@ -57,8 +57,8 @@
"title": "Docker AI: Run markdown commands"
},
{
"command": "docker.labs-ai-tools-vscode.set-openai-api-key",
"title": "Docker AI: Set OpenAI API key"
"command": "docker.labs-ai-tools-vscode.set-secret",
"title": "Docker AI: Set Secret Key"
},
{
"command": "docker.labs-ai-tools-vscode.save-prompt",
31 changes: 18 additions & 13 deletions src/commands/runPrompt.ts
@@ -5,13 +5,14 @@ import * as vscode from "vscode";
import { showPromptPicker } from "../utils/promptPicker";
import { createOutputBuffer } from "../utils/promptFilename";
import { spawnPromptImage, writeKeyToVolume } from "../utils/promptRunner";
import { verifyHasOpenAIKey } from "./setOpenAIKey";
import { getCredential } from "../utils/credential";
import { setProjectDir } from "./setProjectDir";
import { postToBackendSocket } from "../utils/ddSocket";
import { extensionOutput } from "../extension";
import { randomUUID } from "crypto";

const modelProviders = require('../modelproviders.json') as { label: string, id: string, file: string, patterns: string[] }[];

type PromptOption = 'local-dir' | 'local-file' | 'remote';

const getWorkspaceFolder = async () => {
@@ -42,12 +43,6 @@ const getWorkspaceFolder = async () => {
export const runPrompt: (secrets: vscode.SecretStorage, mode: PromptOption) => void = (secrets: vscode.SecretStorage, mode: PromptOption) => vscode.window.withProgress({ location: vscode.ProgressLocation.Window, cancellable: true }, async (progress, token) => {
progress.report({ increment: 1, message: "Starting..." });
postToBackendSocket({ event: 'eventLabsPromptRunPrepare', properties: { mode } });
progress.report({ increment: 5, message: "Checking for OpenAI key..." });

const hasOpenAIKey = await verifyHasOpenAIKey(secrets, true);
if (!hasOpenAIKey) {
return;
}

progress.report({ increment: 5, message: "Checking for workspace..." });

@@ -90,8 +85,6 @@ export const runPrompt: (secrets: vscode.SecretStorage, mode: PromptOption) => v

progress.report({ increment: 5, message: "Writing prompt output file..." });

const apiKey = await secrets.get("openAIKey");

const { editor, doc } = await createOutputBuffer('prompt-output' + randomUUID() + '.md', hostDir);

if (!editor || !doc) {
@@ -118,7 +111,19 @@ export const runPrompt: (secrets: vscode.SecretStorage, mode: PromptOption) => v

try {
progress.report({ increment: 5, message: "Mounting secrets..." });
await writeKeyToVolume(apiKey!);
for (const provider of modelProviders) {
const secret = await secrets.get(provider.id);
if (secret) {
await writeKeyToVolume(provider.file, secret);
}
if (provider.id === 'openai' && !secret) {
const oldOpenAIKey = await secrets.get('openAIKey');
if (oldOpenAIKey) {
await writeKeyToVolume(provider.file, oldOpenAIKey);
}

}
}
progress.report({ increment: 5, message: "Running..." });
const ranges: Record<string, vscode.Range> = {};
const getBaseFunctionRange = () => new vscode.Range(doc.lineCount, 0, doc.lineCount, 0);
@@ -149,7 +154,7 @@ export const runPrompt: (secrets: vscode.SecretStorage, mode: PromptOption) => v
await writeToEditor(`${header} ROLE ${role}${content ? ` (${content})` : ''}\n\n`);
break;
case 'functions-done':
await writeToEditor('\n```' + `\n\n*entering tool*\n\n`);
await writeToEditor('\n```\n');
break;
case 'message':
await writeToEditor(json.params.content);
@@ -169,12 +174,12 @@ export const runPrompt: (secrets: vscode.SecretStorage, mode: PromptOption) => v
await writeToEditor(json.params.messages.map((m: any) => `# ${m.role}\n${m.content}`).join('\n') + '\n');
break;
case 'error':
const errorMSG = String(json.params.content) + String(json.params.message) + String(json.params.message)
const errorMSG = String(json.params.content) || String(json.params.message) || String(json.params.message)

[GitHub Actions / test warning on line 177 in src/commands/runPrompt.ts: Missing semicolon]
await writeToEditor('```error\n' + errorMSG + '\n```\n');
postToBackendSocket({ event: 'eventLabsPromptError', properties: { error: errorMSG } });
break;
default:
await writeToEditor(JSON.stringify(json, null, 2));
break;
}
}, token);
await doc.save();
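The secret-mounting change above replaces the single `writeKeyToVolume(apiKey!)` call with a loop over the configured providers, keeping the old `openAIKey` entry as a fallback for OpenAI. A minimal sketch of that logic as a standalone helper (the function name and signature are illustrative, not part of the commit):

```typescript
import * as vscode from "vscode";

type ModelProvider = { label: string; id: string; file: string; patterns: string[] };

// Illustrative helper: write each stored provider key into the secrets volume,
// falling back to the legacy "openAIKey" entry for the OpenAI provider.
const mountProviderSecrets = async (
    secrets: vscode.SecretStorage,
    providers: ModelProvider[],
    writeKeyToVolume: (keyFile: string, keyVal: string) => Promise<void>
) => {
    for (const provider of providers) {
        // Prefer the per-provider secret stored under the provider id.
        let secret = await secrets.get(provider.id);
        if (!secret && provider.id === 'openai') {
            // Legacy fallback: keys saved by the old "Set OpenAI API key" command.
            secret = await secrets.get('openAIKey');
        }
        if (secret) {
            await writeKeyToVolume(provider.file, secret);
        }
    }
};
```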
68 changes: 68 additions & 0 deletions src/commands/secrets.ts
@@ -0,0 +1,68 @@
import { SecretStorage, ThemeIcon, window } from "vscode";

export const showSetSecretDialog = async (secrets: SecretStorage) => {
const modelProviders = require('../modelproviders.json') as { label: string, id: string, patterns: string[] }[];

type QuickPickItem = {
label: string;
id: string;
buttons: {
iconPath: ThemeIcon;
tooltip: string;
onClick: () => void;
}[];
};

const quickPick = window.createQuickPick<QuickPickItem>();


quickPick.items = modelProviders.map(provider => ({
label: provider.label,
id: provider.id,
buttons: [{
iconPath: new ThemeIcon('trashcan'),
tooltip: 'Clear', onClick: () => {
secrets.delete(provider.id);
void window.showInformationMessage(`${provider.label} key cleared.`);
}
}]
}));

const modelProvider = await new Promise<QuickPickItem | undefined>((resolve) => {
quickPick.onDidAccept(() => {
resolve(quickPick.selectedItems[0]);
quickPick.hide();
});
quickPick.onDidHide(() => {
resolve(undefined);
});
quickPick.onDidTriggerItemButton((event) => {
secrets.delete(event.item.id);
void window.showInformationMessage(`${event.item.label} key cleared.`);
resolve(undefined);
quickPick.hide();
});
quickPick.show();
});

if (!modelProvider) {
return;
}

const secret = await window.showInputBox({
title: `Enter your ${modelProvider.label} API key`,
password: true,
prompt: `Enter your ${modelProvider.label} API key`,
ignoreFocusOut: true,
});

if (!secret) {
return void window.showInformationMessage(`${modelProvider.label} key not set.`);
}


await secrets.store(modelProvider.id, secret);
void window.showInformationMessage(`${modelProvider.label} key set.`);

return modelProvider.id;
};
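showSetSecretDialog resolves to the selected provider id (or undefined if the picker is dismissed, the input is cancelled, or a key is cleared), so callers can read the stored key straight back out of SecretStorage. A hypothetical usage sketch, assuming the file lives at src/commands/secrets.ts as above:

```typescript
import * as vscode from "vscode";
import { showSetSecretDialog } from "./commands/secrets";

// Hypothetical caller: prompt for a provider key, then read it back from SecretStorage.
export const ensureProviderKey = async (context: vscode.ExtensionContext) => {
    const providerId = await showSetSecretDialog(context.secrets); // e.g. "anthropic" or "openai"
    if (!providerId) {
        return undefined; // picker dismissed, key cleared, or input cancelled
    }
    return context.secrets.get(providerId); // the API key that was just stored
};
```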
58 changes: 0 additions & 58 deletions src/commands/setOpenAIKey.ts

This file was deleted.

8 changes: 4 additions & 4 deletions src/extension.ts
@@ -1,5 +1,5 @@
import * as vscode from 'vscode';
import { setOpenAIKey } from './commands/setOpenAIKey';
import { showSetSecretDialog } from './commands/secrets';
import { nativeClient } from './utils/lsp';
import { spawn, spawnSync } from 'child_process';
import semver from 'semver';
@@ -84,10 +84,10 @@ export async function activate(context: vscode.ExtensionContext) {
setDefaultProperties(context);
postToBackendSocket({ event: 'eventLabsPromptActivated' });
ctx = context;
let setOpenAIKeyCommand = vscode.commands.registerCommand('docker.labs-ai-tools-vscode.set-openai-api-key', () => {
setOpenAIKey(context.secrets);
let setProviderSecretCommand = vscode.commands.registerCommand('docker.labs-ai-tools-vscode.set-secret', () => {
showSetSecretDialog(context.secrets);
});
context.subscriptions.push(setOpenAIKeyCommand);
context.subscriptions.push(setProviderSecretCommand);

const pullPromptImage = () => {
const process = spawn('docker', ['pull', "vonwig/prompts:latest"]);
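Other extension code (or a keybinding) can open the same dialog through the renamed command id; a small sketch, purely illustrative:

```typescript
import * as vscode from "vscode";

// Trigger the provider-secret dialog programmatically, e.g. before running
// a prompt that targets a hosted model.
const promptForProviderSecret = () =>
    vscode.commands.executeCommand('docker.labs-ai-tools-vscode.set-secret');
```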
18 changes: 18 additions & 0 deletions src/modelproviders.json
@@ -0,0 +1,18 @@
[
{
"label": "Anthropic Claude",
"id": "anthropic",
"file": ".claude-api-key",
"patterns": [
"claude-*"
]
},
{
"label": "Open AI",
"id": "openai",
"file": ".openai-api-key",
"patterns": [
"gpt-*"
]
}
]
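The diff itself never evaluates `patterns` (that presumably happens inside the prompts container), but the intent is a glob-style match from model name to provider. A sketch of one possible interpretation, not taken from the repository:

```typescript
import modelProviders from "./modelproviders.json";

const escapeRegExp = (s: string) => s.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');

// Match a model name such as "claude-3-5-sonnet" or "gpt-4o" against the
// glob-style patterns ("claude-*", "gpt-*") to find the owning provider.
const providerForModel = (model: string) =>
    modelProviders.find(provider =>
        provider.patterns.some(pattern => {
            const re = new RegExp('^' + pattern.split('*').map(escapeRegExp).join('.*') + '$');
            return re.test(model);
        })
    );

// providerForModel("claude-3-5-sonnet")?.file === ".claude-api-key"
// providerForModel("gpt-4o")?.file === ".openai-api-key"
```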
19 changes: 12 additions & 7 deletions src/utils/promptRunner.ts
@@ -5,6 +5,7 @@ import { notifications } from "./notifications";
import { extensionOutput } from "../extension";
import * as rpc from 'vscode-jsonrpc/node';
import path from "path";
import modelProviders from "../modelproviders.json";

const activePrompts: { [key: string]: Function } = {};

@@ -34,9 +35,10 @@ export const getRunArgs = async (promptRef: string, projectDir: string, username
'run',
'--rm',
'-v', '/var/run/docker.sock:/var/run/docker.sock',
'-v', 'openai_key:/secret',
'-v', 'docker-vsc-secrets:/root/secrets',
'-e', 'OPENAI_API_KEY_LOCATION=/root/secrets',
'-e', 'CLAUDE_API_KEY_LOCATION=/root/secrets',
'--mount', 'type=volume,source=docker-prompts,target=/prompts',
'-e', 'OPENAI_API_KEY_LOCATION=/secret',
'-v', "/run/host-services/backend.sock:/host-services/docker-desktop-backend.sock",
'-e', "DOCKER_DESKTOP_SOCKET_PATH=/host-services/docker-desktop-backend.sock",
];
@@ -122,22 +124,25 @@ const getJSONArgForPlatform = (json: object) => {
}
}

export const writeKeyToVolume = async (key: string) => {
export const writeKeyToVolume = async (keyFile: string, keyVal: string) => {

const args1 = ["pull", "vonwig/function_write_files"];

const args2 = [
"run",
"-v", "openai_key:/secret",
"-v", `docker-vsc-secrets:/secret`,
"--rm",
"--workdir", "/secret",
"vonwig/function_write_files",
getJSONArgForPlatform({ files: [{ path: ".openai-api-key", content: key, executable: false }] })
getJSONArgForPlatform({ files: [{ path: keyFile, content: keyVal, executable: false }] })
];

extensionOutput.appendLine(JSON.stringify({
"write-open-ai-key-to-volume": {
args1, args2
"write-secret-to-volume": {
keyFile,
keyVal,
args1,
args2
}
}));

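With the new signature, writeKeyToVolume writes one file per provider into the shared docker-vsc-secrets volume via the vonwig/function_write_files helper image. A rough, simplified equivalent of that invocation (the real code builds the JSON argument per platform through getJSONArgForPlatform and pulls the image first):

```typescript
import { spawnSync } from "child_process";

// Simplified sketch of the updated writeKeyToVolume for a single provider key.
const writeSecretFile = (keyFile: string, keyVal: string) => {
    const payload = JSON.stringify({
        files: [{ path: keyFile, content: keyVal, executable: false }]
    });
    return spawnSync('docker', [
        'run', '--rm',
        '-v', 'docker-vsc-secrets:/secret', // same volume the prompt container mounts at /root/secrets
        '--workdir', '/secret',
        'vonwig/function_write_files',
        payload
    ]);
};

// e.g. writeSecretFile('.claude-api-key', anthropicKey); writes /secret/.claude-api-key
```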
1 change: 1 addition & 0 deletions tsconfig.json
@@ -18,6 +18,7 @@
"include": [
"src/promptgrammar.json",
"src/promptmetadatagrammar.json",
"src/modelproviders.json",
"src/**/*.ts"
]
}
