Skip to content

Commit

Permalink
Add logging
Browse files Browse the repository at this point in the history
  • Loading branch information
RussellCanfield committed Jan 22, 2024
1 parent 7d8e2b1 commit 26dce9c
Show file tree
Hide file tree
Showing 9 changed files with 204 additions and 58 deletions.
8 changes: 8 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -125,6 +125,14 @@ The models above will require enough RAM to run them correctly, you should have

## Release Notes

### 0.1.3

Added a logging output channel to the extension for troubleshooting purposes. Improved error handling and user feedback for invalid configurations.

<p align="center" width="100%">
<img width="20%" src="./docs/LoggingOutput.png">
</p>

### 0.1.2

- Add two new settings:
Expand Down
Binary file added docs/LoggingOutput.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
2 changes: 1 addition & 1 deletion package.json
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
"name": "wing-man",
"displayName": "Wingman-AI",
"description": "Wingman - AI powered assistant to help you write your best code, we won't leave you hanging.",
"version": "0.1.2",
"version": "0.1.3",
"publisher": "WingMan",
"license": "MIT",
"author": {
Expand Down
2 changes: 2 additions & 0 deletions src/events/eventEmitter.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
import * as vscode from "vscode";

class EventEmitter {
public _onFatalError: vscode.EventEmitter<void> =
new vscode.EventEmitter<void>();
public _onQueryStart: vscode.EventEmitter<void> =
new vscode.EventEmitter<void>();
public _onQueryComplete: vscode.EventEmitter<void> =
Expand Down
24 changes: 24 additions & 0 deletions src/providers/loggingProvider.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
import * as vscode from "vscode";

/**
 * Writes timestamped diagnostic messages to a dedicated "Wingman"
 * output channel so users can inspect extension activity for
 * troubleshooting.
 */
class LoggingProvider {
	dbgChannel: vscode.OutputChannel;

	constructor() {
		this.dbgChannel = vscode.window.createOutputChannel("Wingman");
	}

	/** Logs an informational message to the Wingman output channel. */
	public logInfo(message: string): void {
		this.log("info", message);
	}

	/** Logs an error message to the Wingman output channel. */
	public logError(message: string): void {
		this.log("error", message);
	}

	// Single place for the line format so info/error entries cannot drift apart.
	private log(level: "info" | "error", message: string): void {
		this.dbgChannel.appendLine(
			`${new Date().toLocaleString()} - [${level}] ${message}`
		);
	}
}

// Module-level singleton: every importer shares one output channel.
const loggingProvider = new LoggingProvider();
export { loggingProvider };
15 changes: 15 additions & 0 deletions src/providers/statusBarProvider.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,10 @@ import { eventEmitter } from "../events/eventEmitter";

export class ActivityStatusBar {
activityStatusBarItem: vscode.StatusBarItem;
isInErrorState: boolean = false;

public readonly onFatalError: vscode.Event<void> =
eventEmitter._onFatalError.event;

public readonly onQueryStart: vscode.Event<void> =
eventEmitter._onQueryStart.event;
Expand All @@ -26,14 +30,25 @@ export class ActivityStatusBar {
this.onQueryComplete(() => {
this.TogglePending(false);
});

this.onFatalError(() => {
this.ToggleError();
});
}

/**
 * Switches the status bar between the idle logo and a spinner while a
 * query is in flight. Becomes a no-op once a fatal error has latched
 * the bar into its error state.
 */
public TogglePending(pending: boolean) {
	if (this.isInErrorState) {
		return;
	}

	const icon = pending ? "$(sync~spin)" : "$(wingman-logo)";
	this.activityStatusBarItem.text = `${icon} Wingman`;
}

// Puts the status bar into its error state. The flag latches: once set,
// TogglePending ignores further updates (no reset path is visible here).
public ToggleError() {
this.isInErrorState = true;
this.activityStatusBarItem.text = "$(testing-error-icon) Wingman";
}

// Releases the status bar item when the owner is disposed.
dispose() {
this.activityStatusBarItem?.dispose();
}
Expand Down
3 changes: 2 additions & 1 deletion src/service/base.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@ import { Ollama } from "./ollama/ollama";
import { Settings } from "../types/Settings";
import { HuggingFace } from "./huggingface/huggingface";
import { AIModel } from "../types/Models";
import { loggingProvider } from "../providers/loggingProvider";

export function GetProviderFromSettings(): AIProvider {
const config = vscode.workspace.getConfiguration("Wingman");
Expand All @@ -12,7 +13,7 @@ export function GetProviderFromSettings(): AIProvider {
?.toLocaleLowerCase()
.trim();

console.log(`AI Provider: ${aiProvider} found.`);
loggingProvider.logInfo(`AI Provider: ${aiProvider} found.`);

if (aiProvider === "huggingface") {
return new HuggingFace();
Expand Down
110 changes: 78 additions & 32 deletions src/service/huggingface/huggingface.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,8 @@ import { Settings, defaultMaxTokens } from "../../types/Settings";
import { HuggingFaceAIModel } from "../../types/Models";
import { CodeLlama } from "./models/codellama";
import { Mistral } from "./models/mistral";
import { loggingProvider } from "../../providers/loggingProvider";
import { eventEmitter } from "../../events/eventEmitter";

type HuggingFaceRequest = {
inputs: string;
Expand Down Expand Up @@ -37,47 +39,57 @@ export class HuggingFace implements AIProvider {
const huggingFaceConfig =
config.get<Settings["huggingface"]>("HuggingFace");

console.log("HuggingFace settings loaded: ", huggingFaceConfig);
loggingProvider.logInfo(
`HuggingFace settings loaded: ${JSON.stringify(huggingFaceConfig)}`
);

if (huggingFaceConfig) {
this.settings = huggingFaceConfig;
if (!huggingFaceConfig) {
this.handleError("Unable to log HuggingFace configuration.");
return;
}

if (!this.settings.apiKey.trim()) {
vscode.window.showErrorMessage(
"Hugging Face API key is required."
);
throw new Error("Missing Hugging Face API key.");
}
this.settings = huggingFaceConfig!;

this.chatModel = this.getChatModel(this.settings.chatModel);
this.codeModel = this.getCodeModel(this.settings.codeModel);
if (!this.settings.apiKey.trim()) {
const errorMsg = "Hugging Face API key is required.";
vscode.window.showErrorMessage(errorMsg);
loggingProvider.logInfo(errorMsg);
throw new Error(errorMsg);
}

this.chatModel = this.getChatModel(this.settings.chatModel);
this.codeModel = this.getCodeModel(this.settings.codeModel);
}

private getCodeModel(codeModel: string): HuggingFaceAIModel {
// Reports a fatal configuration problem: shows the message to the user,
// writes it to the Wingman output channel, fires the fatal-error event
// (the status bar listens and switches to its error icon), then throws
// to abort initialization. Always throws — callers must not expect it
// to return.
private handleError(message: string) {
vscode.window.showErrorMessage(message);
loggingProvider.logError(message);
eventEmitter._onFatalError.fire();
throw new Error(message);
}

private getCodeModel(codeModel: string): HuggingFaceAIModel | undefined {
if (codeModel.includes("codellama")) {
return new CodeLlama();
} else if (codeModel.includes("mistral")) {
return new Mistral();
} else {
vscode.window.showErrorMessage(
"Invalid code model name, currently code supports the CodeLlama model."
);
throw new Error("Invalid code model name");
}

this.handleError(
"Invalid code model name, currently code supports the CodeLlama model."
);
}

private getChatModel(chatModel: string): HuggingFaceAIModel {
private getChatModel(chatModel: string): HuggingFaceAIModel | undefined {
if (chatModel.includes("codellama")) {
return new CodeLlama();
} else if (chatModel.includes("mistral")) {
return new Mistral();
} else {
vscode.window.showErrorMessage(
"Invalid chat model name, currently chat supports the Mistral model."
);
throw new Error("Invalid chat model name");
}

this.handleError(
"Invalid chat model name, currently chat supports the Mistral model."
);
}

private getSafeUrl() {
Expand All @@ -94,7 +106,7 @@ export class HuggingFace implements AIProvider {
signal: AbortSignal
) {
if (signal.aborted) {
return null;
return undefined;
}
return fetch(new URL(`${this.getSafeUrl()}${modelName}`), {
method: "POST",
Expand All @@ -112,10 +124,26 @@ export class HuggingFace implements AIProvider {
modelName: string,
signal: AbortSignal
) {
const response = await this.fetchModelResponse(
payload,
modelName,
signal
const startTime = new Date().getTime();
let response: Response | undefined;

try {
response = await this.fetchModelResponse(
payload,
modelName,
signal
);
} catch (error) {
loggingProvider.logError(
`HuggingFace - chat request with model: ${modelName} failed with the following error: ${error}`
);
}

const endTime = new Date().getTime();
const executionTime = (endTime - startTime) / 1000;

loggingProvider.logInfo(
`HuggingFace - chat execution time: ${executionTime} seconds`
);

if (!response?.body) {
Expand All @@ -141,6 +169,8 @@ export class HuggingFace implements AIProvider {
ending: string,
signal: AbortSignal
): Promise<string> {
const startTime = new Date().getTime();

const codeRequestOptions: HuggingFaceRequest = {
inputs: this.codeModel!.CodeCompletionPrompt.replace(
"{beginning}",
Expand All @@ -159,10 +189,25 @@ export class HuggingFace implements AIProvider {
},
};

const response = await this.fetchModelResponse(
codeRequestOptions,
this.settings?.codeModel!,
signal
let response: Response | undefined;

try {
response = await this.fetchModelResponse(
codeRequestOptions,
this.settings?.codeModel!,
signal
);
} catch (error) {
loggingProvider.logError(
`HuggingFace - code completion request with model ${this.settings?.codeModel} failed with the following error: ${error}`
);
}

const endTime = new Date().getTime();
const executionTime = (endTime - startTime) / 1000;

loggingProvider.logInfo(
`HuggingFace - Code Completion execution time: ${executionTime} seconds`
);

if (!response?.body) {
Expand Down Expand Up @@ -205,6 +250,7 @@ export class HuggingFace implements AIProvider {

this.clearChatHistory();

// Left in case Hugging Face implements streaming.
for await (const chunk of this.generate(
chatPayload,
this.settings?.chatModel!,
Expand Down
Loading

0 comments on commit 26dce9c

Please sign in to comment.