Skip to content

Commit fcdcc00

Browse files
committed
feat: refactor API keys management
- add general class SecureStorage for keyring one and file based one - implement file based secure storage - add --show-keys command to see added API keys for all providers - update readme with some issues resolved and a more precise simulation description - add getting embedding model provider api separately from main model provider - introduce 'weak model' that may handle some easy LLM tasks
1 parent f5dc52e commit fcdcc00

9 files changed

+60
-42
lines changed

src/ai/agent.ts

+6-6
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
import type { ChatMessage } from "llamaindex";
22
import { OpenAIAgent } from "llamaindex";
33
import { DynamicContextData, gatherContextData } from "../cli/contextUtils";
4-
import { LLM_ID, MESSAGE_ROLES, MODEL_PRICES } from "../constants";
4+
import { MESSAGE_ROLES, MODELS, WEAK_MODEL_ID } from "../constants";
55
import { getCorrectness, getFaithfulness, getRelevancy } from "../features/userScore/evaluations/evaluations";
66
import { getUserEvaluationScore } from "../features/userScore/getUserEvaluationScore";
77
import { askUserCallback } from "../tools/askUser";
@@ -16,7 +16,7 @@ import {
1616
} from "../utils/database";
1717
import { formatAgentMessage } from "../utils/formatting";
1818
import { generateConversationTitle } from "../utils/generateConversationTitle";
19-
import { getApiKeyForModel, getOrPromptForAPIKey } from "../utils/getOrPromptForAPIKey";
19+
import { getOrPromptForAPIKey } from "../utils/getOrPromptForAPIKey";
2020
import { UsageCostResult } from "../utils/interface";
2121
import { logger, LogLevel, LogLevelType } from "../utils/logger";
2222
import { parseLLMResponse } from "../utils/parseLLMResponse";
@@ -107,7 +107,7 @@ export async function runAgent(
107107
if (input === "<input_aborted_by_user />") {
108108
input = "<service_message>User expressed explicit intent to exit the program.</service_message>";
109109
}
110-
const apiKey = await getApiKeyForModel(model);
110+
const apiKey = await getOrPromptForAPIKey(model);
111111
if (!apiKey) {
112112
logger.error("No API key found");
113113
throw new Error("LLM API key not found. Please run the application again to set it up.");
@@ -278,8 +278,8 @@ async function finalizeAgentRun(
278278

279279
logger.debug("Generating conversation title");
280280
const title = await generateConversationTitle(fullConversation, {
281-
apiKey: await getOrPromptForAPIKey(LLM_ID),
282-
modelName: LLM_ID,
281+
apiKey: await getOrPromptForAPIKey(WEAK_MODEL_ID),
282+
modelName: WEAK_MODEL_ID,
283283
});
284284

285285
logger.debug("Getting user evaluation score");
@@ -306,7 +306,7 @@ async function finalizeAgentRun(
306306

307307
function countUsageCost(usage: Record<string, number>, model: string): UsageCostResult {
308308
// Get model pricing from the current model being used
309-
const modelConfig = MODEL_PRICES[model] ?? MODEL_PRICES["gpt-4o-mini"]; // fallback to a default model
309+
const modelConfig = MODELS[model] ?? MODELS["gpt-4o-mini"]; // fallback to a default model
310310

311311
// Normalize token counts from different possible field names
312312
const inputTokens = usage.input_tokens || usage.prompt_tokens || 0;

src/ai/retrieval/vectorStore.ts

+5-4
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,7 @@ import {
77
storageContextFromDefaults,
88
VectorStoreIndex,
99
} from "llamaindex";
10-
import { EMBEDDINGS_MODEL_ID, LLM_ID, VECTOR_STORE_PATH } from "../../constants";
10+
import { EMBEDDINGS_MODEL_ID, VECTOR_STORE_PATH, WEAK_MODEL_ID } from "../../constants";
1111
import { getOrPromptForAPIKey } from "../../utils/getOrPromptForAPIKey";
1212
import { logger } from "../../utils/logger";
1313

@@ -16,16 +16,17 @@ let vectorStoreIndex: VectorStoreIndex | null = null;
1616
export async function initializeVectorStoreIndex(): Promise<VectorStoreIndex> {
1717
const storageContext = await storageContextFromDefaults({ persistDir: VECTOR_STORE_PATH });
1818

19-
const apiKey = await getOrPromptForAPIKey(LLM_ID);
19+
const embeddingProviderApiKey = await getOrPromptForAPIKey(EMBEDDINGS_MODEL_ID);
20+
const weakModelApiKey = await getOrPromptForAPIKey(WEAK_MODEL_ID);
2021
let index: VectorStoreIndex;
2122

2223
try {
2324
logger.debug("Attempting to load existing index");
2425
index = await VectorStoreIndex.init({
2526
storageContext,
2627
serviceContext: serviceContextFromDefaults({
27-
embedModel: new OpenAIEmbedding({ apiKey, model: EMBEDDINGS_MODEL_ID }),
28-
llm: new OpenAI({ apiKey, model: LLM_ID }),
28+
embedModel: new OpenAIEmbedding({ apiKey: embeddingProviderApiKey, model: EMBEDDINGS_MODEL_ID }),
29+
llm: new OpenAI({ apiKey: weakModelApiKey, model: WEAK_MODEL_ID }),
2930
}),
3031
});
3132
logger.debug("Existing index loaded successfully");

src/constants.ts

+4-3
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,6 @@
11
import path from "path";
22
import { getUserHomeDir } from "./utils/getUserHomeDir";
33

4-
export const LLM_ID = "gpt-4o-mini";
5-
64
export const API_KEY_PROMPTS = {
75
OPENAI: "Please enter your OpenAI API key: ",
86
GROQ: "Please enter your Groq API key: ",
@@ -30,6 +28,7 @@ export const USER_PREFS_FILE_NAME = "user_preferences.json";
3028
export const APP_CONFIG_FILE_PATH = path.join(CONFIG_DIR_PATH, APP_CONFIG_FILE_NAME);
3129
export const USER_PREFS_FILE_PATH = path.join(CONFIG_DIR_PATH, USER_PREFS_FILE_NAME);
3230
export const EMBEDDINGS_MODEL_ID = "text-embedding-3-small";
31+
export const EMBEDDING_MODELS = [EMBEDDINGS_MODEL_ID];
3332
export const VECTOR_STORE_PATH = path.join(CONFIG_DIR_PATH, "vector_store");
3433

3534
export const MESSAGE_ROLES = {
@@ -69,7 +68,7 @@ interface AIModelConfig {
6968
default?: boolean;
7069
}
7170

72-
export const MODEL_PRICES: Record<string, AIModelConfig> = {
71+
export const MODELS: Record<string, AIModelConfig> = {
7372
"gpt-4o-mini": {
7473
id: "gpt-4o-mini",
7574
friendlyName: "openAIGpt4oMiniModel",
@@ -171,3 +170,5 @@ export const CONTEXT_ALLOCATION = {
171170
maxChars: 20000,
172171
},
173172
};
173+
174+
export const WEAK_MODEL_ID = "gpt-4o-mini";

src/index.ts

+9-4
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@ import { APIError } from "openai";
44
import { agentLoop } from "./ai/agent";
55
import { formatApiKey, getAllStoredKeys } from "./cli/getAllKeys";
66
import { parseArguments, printHelp } from "./cli/interface";
7-
import { MAX_INPUT_LENGTH } from "./constants";
7+
import { EMBEDDINGS_MODEL_ID, MAX_INPUT_LENGTH } from "./constants";
88
import { deleteAPIKey } from "./utils/apiKeyManager";
99
import { loadConfig, saveConfig } from "./utils/config";
1010
import { initializeDatabase } from "./utils/database";
@@ -45,6 +45,11 @@ async function main() {
4545
appConfig.model = model;
4646
}
4747

48+
await getOrPromptForAPIKey(EMBEDDINGS_MODEL_ID, {
49+
prePromptText:
50+
"Please enter your OpenAI API key. It is used for embeddings and will consume small amount of tokens",
51+
});
52+
4853
logger.info(`running using model "${appConfig.model}"`);
4954

5055
logger.setLevel(appConfig.logLevel);
@@ -62,7 +67,7 @@ async function main() {
6267

6368
if (resetKey) {
6469
const provider = getProviderFromModel(appConfig.model);
65-
deleteAPIKey(provider);
70+
await deleteAPIKey(provider);
6671
console.log(
6772
chalk.green("API key has been deleted. You will be prompted for a new key on the next run."),
6873
);
@@ -90,8 +95,8 @@ async function main() {
9095
} catch (error) {
9196
if (error instanceof APIError) {
9297
if (error.status === 401) {
93-
console.error(chalk.red("Invalid OpenAI API key." + error.message));
94-
deleteAPIKey(appConfig.model);
98+
console.error(chalk.red("Invalid API key." + error.message));
99+
await deleteAPIKey(appConfig.model);
95100
await getOrPromptForAPIKey(appConfig.model);
96101
}
97102
}

src/utils/apiKeyManager.ts

+5-5
Original file line numberDiff line numberDiff line change
@@ -127,9 +127,9 @@ class StorageManager {
127127
const storageManager = new StorageManager();
128128

129129
// Export the methods
130-
export const getAPIKey = (provider: APIProvider) => storageManager.getAPIKey(provider);
131-
export const storeAPIKey = (apiKey: string, provider: APIProvider) =>
130+
export const getAPIKey = async (provider: APIProvider) => storageManager.getAPIKey(provider);
131+
export const storeAPIKey = async (apiKey: string, provider: APIProvider) =>
132132
storageManager.storeAPIKey(apiKey, provider);
133-
export const deleteAPIKey = (modelId: string) => storageManager.deleteAPIKey(modelId);
134-
export const checkStorageAvailability = () => storageManager.checkStorageAvailability();
135-
export const getStorageDiagnostics = () => storageManager.getStorageDiagnostics();
133+
export const deleteAPIKey = async (modelId: string) => storageManager.deleteAPIKey(modelId);
134+
export const checkStorageAvailability = async () => storageManager.checkStorageAvailability();
135+
export const getStorageDiagnostics = async () => storageManager.getStorageDiagnostics();

src/utils/config.ts

+2-2
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
import { execSync } from "child_process";
22
import fs from "fs";
3-
import { APP_CONFIG_FILE_PATH, CONFIG_DIR_PATH, MODEL_PRICES, USER_PREFS_FILE_PATH } from "../constants";
3+
import { APP_CONFIG_FILE_PATH, CONFIG_DIR_PATH, MODELS, USER_PREFS_FILE_PATH } from "../constants";
44
import { LogLevel, LogLevelType } from "./logger";
55

66
export interface AppConfig {
@@ -17,7 +17,7 @@ export interface UserPreferences {
1717

1818
const DEFAULT_APP_CONFIG: AppConfig = {
1919
logLevel: LogLevel.WARN,
20-
model: Object.values(MODEL_PRICES).find((model) => model.default)?.id || "",
20+
model: Object.values(MODELS).find((model) => model.default)?.id || "",
2121
};
2222

2323
const DEFAULT_USER_PREFS: UserPreferences = {

src/utils/embeddings.ts

+2-2
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
import { OpenAIEmbedding } from "llamaindex";
2-
import { LLM_ID } from "../constants";
2+
import { WEAK_MODEL_ID } from "../constants";
33
import { getOrPromptForAPIKey } from "./getOrPromptForAPIKey";
44
import { logger } from "./logger";
55

@@ -13,7 +13,7 @@ const embeddingModelId = "text-embedding-3-small";
1313
let embeddingModel: OpenAIEmbedding | null = null;
1414

1515
export async function initializeEmbeddingModel(): Promise<void> {
16-
const apiKey = await getOrPromptForAPIKey(LLM_ID);
16+
const apiKey = await getOrPromptForAPIKey(WEAK_MODEL_ID);
1717
if (!apiKey) {
1818
throw new Error("OpenAI API key not found. Please set up your API key.");
1919
}

src/utils/getOrPromptForAPIKey.ts

+25-14
Original file line numberDiff line numberDiff line change
@@ -1,12 +1,18 @@
11
import chalk from "chalk";
22
import { getFreeformInput } from "../cli/interface";
3-
import { API_KEY_PREFIXES, API_KEY_PROMPTS, APIProvider } from "../constants";
3+
import { API_KEY_PREFIXES, API_KEY_PROMPTS, APIProvider, EMBEDDINGS_MODEL_ID } from "../constants";
44
import { getAPIKey, storeAPIKey } from "./apiKeyManager";
55
import { logger } from "./logger";
66

77
const MAX_RETRIES = 5;
88

9-
export async function getOrPromptForAPIKey(modelId: string, forceNew: boolean = false): Promise<string> {
9+
interface GetAPIKeyOptions {
10+
forceNew?: boolean;
11+
prePromptText?: string;
12+
}
13+
14+
export async function getOrPromptForAPIKey(modelId: string, options: GetAPIKeyOptions = {}): Promise<string> {
15+
const { forceNew = false, prePromptText } = options;
1016
const provider = getProviderFromModel(modelId);
1117
let retries = 0;
1218

@@ -32,7 +38,11 @@ export async function getOrPromptForAPIKey(modelId: string, forceNew: boolean =
3238
if (!apiKey) {
3339
try {
3440
logger.debug(`Prompting user for ${provider} API key`);
35-
console.log(chalk.yellow(`${provider} API key not found or invalid.`));
41+
42+
const text = prePromptText
43+
? chalk.yellow(prePromptText)
44+
: `${provider} API key not found or invalid.`;
45+
console.log(chalk.yellow(text));
3646
apiKey = await getFreeformInput(API_KEY_PROMPTS[provider], true);
3747
} catch (inputError) {
3848
if (inputError.message.includes("non-interactive mode")) {
@@ -72,6 +82,17 @@ export async function getOrPromptForAPIKey(modelId: string, forceNew: boolean =
7282
process.exit(1);
7383
}
7484

85+
export async function getApiKeyForModel(modelId: string): Promise<string> {
86+
const apiKey = await getOrPromptForAPIKey(modelId);
87+
88+
if (!apiKey) {
89+
logger.error(`No ${modelId} API key found`);
90+
throw new Error(`${modelId} API key not found. Please run the application again to set it up.`);
91+
}
92+
93+
return apiKey;
94+
}
95+
7596
function isValidAPIKey(apiKey: string, provider: APIProvider): boolean {
7697
return apiKey.trim() !== "" && apiKey.startsWith(API_KEY_PREFIXES[provider]);
7798
}
@@ -82,18 +103,8 @@ export function getProviderFromModel(modelId: string): APIProvider {
82103
if (modelId.startsWith("gpt")) result = "OPENAI";
83104
if (modelId.startsWith("llama")) result = "GROQ";
84105
if (modelId.startsWith("claude")) result = "ANTHROPIC";
106+
if (EMBEDDINGS_MODEL_ID === modelId) result = "OPENAI";
85107
if (!result) throw new Error(`Unsupported model ID: ${modelId}`);
86108
logger.debug(`Model ID ${modelId} matches a supported API provider, "${result}"`);
87109
return result;
88110
}
89-
90-
export async function getApiKeyForModel(modelId: string): Promise<string> {
91-
const apiKey = await getOrPromptForAPIKey(modelId);
92-
93-
if (!apiKey) {
94-
logger.error(`No ${modelId} API key found`);
95-
throw new Error(`${modelId} API key not found. Please run the application again to set it up.`);
96-
}
97-
98-
return apiKey;
99-
}

src/utils/usageCost.ts

+2-2
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
import { MODEL_PRICES } from "../constants";
1+
import { MODELS } from "../constants";
22

33
interface UsageCostResult {
44
costUSD: number;
@@ -27,7 +27,7 @@ export function countUsageCost(
2727
},
2828
modelId: string,
2929
): UsageCostResult {
30-
const modelConfig = MODEL_PRICES[modelId];
30+
const modelConfig = MODELS[modelId];
3131
if (!modelConfig) {
3232
throw new Error(`Unknown model ID: ${modelId}`);
3333
}

0 commit comments

Comments
 (0)