diff --git a/chatapi/src/index.ts b/chatapi/src/index.ts
index 28e8e838a8..a686884537 100644
--- a/chatapi/src/index.ts
+++ b/chatapi/src/index.ts
@@ -1,3 +1,4 @@
+/* eslint-disable no-console */
 import express from 'express';
 import dotenv from 'dotenv';
 import cors from 'cors';
@@ -43,7 +44,7 @@ wss.on('connection', (ws) => {
       if (chatResponse) {
         ws.send(JSON.stringify({
           'type': 'final',
-          'completionText': chatResponse.completionText,
+          'completionText': chatResponse.completion,
           'couchDBResponse': chatResponse.couchSaveResponse
         }));
       }
@@ -53,7 +54,7 @@ wss.on('connection', (ws) => {
   });

   ws.on('close', () => {
-    console.log('WebSocket connection closed'); // eslint-disable-line no-console
+    console.log('WebSocket connection closed');
   });
 });

@@ -66,7 +67,7 @@ app.post('/', async (req: any, res: any) => {
   try {
     if (!save) {
-      const response = await chatNoSave(data.content, data.aiProvider, data.context, data.assistant, false);
+      const response = await chatNoSave(data.content, data.aiProvider, data.context, false);
       return res.status(200).json({
         'status': 'Success',
         'chat': response
@@ -75,7 +76,7 @@ app.post('/', async (req: any, res: any) => {
       const response = await chat(data, false);
       return res.status(201).json({
         'status': 'Success',
-        'chat': response?.completionText,
+        'chat': response?.completion,
         'couchDBResponse': response?.couchSaveResponse
       });
     }
@@ -95,4 +96,4 @@ app.get('/checkproviders', async (req: any, res: any) => {

 const port = process.env.SERVE_PORT || 5000;

-server.listen(port, () => console.log(`Server running on port ${port}`)); // eslint-disable-line no-console
+server.listen(port, () => console.log(`Server running on port ${port}`));
diff --git a/chatapi/src/models/chat.model.ts b/chatapi/src/models/chat.model.ts
index 23fde46663..94dcd822ab 100644
--- a/chatapi/src/models/chat.model.ts
+++ b/chatapi/src/models/chat.model.ts
@@ -24,7 +24,7 @@ export interface ModelsDocument {
 }

 export interface ChatMessage {
-  role: 'user' | 'assistant';
+  role: 'user' | 'assistant' | 'developer';
   content: string;
 }

@@ -34,3 +34,7 @@ export interface ChatItem {
   response: string;
 }

+export interface ChatResponse {
+  responseId: string;
+  message: string;
+}
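Note (not part of the patch): with the response payload now typed as `string | ChatResponse`, a minimal sketch of how a consumer of the REST or WebSocket payloads above can narrow the field; the names follow the interface added in `chat.model.ts`.

```typescript
interface ChatResponse {
  responseId: string;
  message: string;
}

// Streaming replies still arrive as plain text; non-streaming replies now carry
// the response id alongside the message text.
function readChatField(chat: string | ChatResponse): string {
  return typeof chat === 'string' ? chat : chat.message;
}
```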
diff --git a/chatapi/src/services/chat.service.ts b/chatapi/src/services/chat.service.ts
index 3295c21967..69f6f2ab1e 100644
--- a/chatapi/src/services/chat.service.ts
+++ b/chatapi/src/services/chat.service.ts
@@ -1,13 +1,14 @@
 import { DocumentInsertResponse } from 'nano';
+import { assistant } from '../config/ai-providers.config';
 import { chatDB } from '../config/nano.config';
 import { retrieveChatHistory } from '../utils/db.utils';
 import { aiChat } from '../utils/chat.utils';
-import { AIProvider, ChatMessage } from '../models/chat.model';
+import { AIProvider, ChatMessage, ChatResponse } from '../models/chat.model';

 function handleChatError(error: any) {
   if (error.response) {
-    throw new Error(`GPT Service Error: ${error.response.status} - ${error.response.data?.error?.code}`);
+    throw new Error(`Chatapi Service Error: ${error.response.status} - ${error.response.data?.error?.code}`);
   } else {
     throw new Error(error.message);
   }
@@ -21,7 +22,7 @@
  * @returns Object with completion text and CouchDB save response
  */
 export async function chat(data: any, stream?: boolean, callback?: (response: string) => void): Promise<{
-  completionText: string;
+  completion: string | ChatResponse;
   couchSaveResponse: DocumentInsertResponse;
 } | undefined> {
   const { content, ...dbData } = data;
@@ -39,15 +40,24 @@ export async function chat(data: any, stream?: boolean, callback?: (response: st
     dbData.conversations = [];
     dbData.createdDate = Date.now();
     dbData.aiProvider = aiProvider.name;
+    messages.push({
+      'role': 'developer',
+      'content': assistant.instructions || ''
+    });
   }

-  dbData.conversations.push({ 'id': Date.now().toString(), 'query': content, 'response': '' });
+  dbData.conversations.push({
+    'id': Date.now().toString(),
+    'query': content,
+    'response': ''
+  });

   const res = await chatDB.insert(dbData);
   messages.push({ 'role': 'user', content });

   try {
-    const completionText = await aiChat(messages, aiProvider, dbData.assistant, dbData.context, stream, callback);
+    const completion = await aiChat(messages, aiProvider, dbData.context, stream, callback);
+    const completionText = typeof completion === 'string' ? completion : completion.message;

     dbData.conversations[dbData.conversations.length - 1].response = completionText;
@@ -57,7 +67,7 @@ export async function chat(data: any, stream?: boolean, callback?: (response: st
     const couchSaveResponse = await chatDB.insert(dbData);

     return {
-      completionText,
+      completion,
       couchSaveResponse
     };
   } catch (error: any) {
@@ -68,22 +78,22 @@ export async function chat(data: any, stream?: boolean, callback?: (response: st
 export async function chatNoSave(
   content: any,
   aiProvider: AIProvider,
-  assistant: boolean,
   context?: any,
   stream?: boolean,
   callback?: (response: string) => void
-): Promise<string | undefined> {
+): Promise<string | ChatResponse | undefined> {
   const messages: ChatMessage[] = [];

   messages.push({ 'role': 'user', content });

   try {
-    const completionText = await aiChat(messages, aiProvider, assistant, context, stream, callback);
+    const completion = await aiChat(messages, aiProvider, context, stream, callback);
+    const completionText = typeof completion === 'string' ? completion : completion.message;
     messages.push({ 'role': 'assistant', 'content': completionText });

-    return completionText;
+    return completion;
   } catch (error: any) {
     handleChatError(error);
   }
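For reference, a hedged usage sketch of the updated service signatures; the payload values and the import path are illustrative assumptions, not taken from the patch.

```typescript
import { chat, chatNoSave } from './services/chat.service';

async function demo() {
  // Saved conversation: `completion` is a plain string when streaming, otherwise a ChatResponse.
  const saved = await chat({
    'user': 'admin',
    'content': 'Summarize this course',
    'aiProvider': { 'name': 'openai' },
    'context': { 'data': '', 'resource': null }
  }, false);
  console.log(saved?.completion, saved?.couchSaveResponse.id);

  // One-off prompt with no CouchDB save; note the `assistant` flag is gone from the signature.
  const oneOff = await chatNoSave('Hello there', { 'name': 'openai' }, { 'data': '' }, false);
  console.log(typeof oneOff === 'string' ? oneOff : oneOff?.message);
}
```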
diff --git a/chatapi/src/utils/chat-assistant.utils.ts b/chatapi/src/utils/chat-assistant.utils.ts
deleted file mode 100644
index d6895b37ec..0000000000
--- a/chatapi/src/utils/chat-assistant.utils.ts
+++ /dev/null
@@ -1,103 +0,0 @@
-import { keys } from '../config/ai-providers.config';
-import { assistant } from '../config/ai-providers.config';
-
-/**
- * Creates an assistant with the specified model
- * @param model - Model to use for assistant
- * @returns Assistant object
- */
-export async function createAssistant(model: string) {
-  return await keys.openai.beta.assistants.create({
-    'name': assistant?.name,
-    'instructions': assistant?.instructions,
-    'tools': [ { 'type': 'code_interpreter' } ],
-    model,
-  });
-}
-
-export async function createThread() {
-  return await keys.openai.beta.threads.create();
-}
-
-export async function addToThread(threadId: any, message: string) {
-  return await keys.openai.beta.threads.messages.create(
-    threadId,
-    {
-      'role': 'user',
-      'content': message
-    }
-  );
-}
-
-export async function createRun(threadID: any, assistantID: any, instructions?: string) {
-  return await keys.openai.beta.threads.runs.create(
-    threadID,
-    {
-      'assistant_id': assistantID,
-      instructions
-    }
-  );
-}
-
-export async function waitForRunCompletion(threadId: any, runId: any) {
-  let runStatus = await keys.openai.beta.threads.runs.retrieve(threadId, runId);
-  while (runStatus.status !== 'completed') {
-    await new Promise((resolve) => setTimeout(resolve, 1000));
-    runStatus = await keys.openai.beta.threads.runs.retrieve(threadId, runId);
-  }
-  return runStatus;
-}
-
-export async function retrieveResponse(threadId: any): Promise<string> {
-  const messages = await keys.openai.beta.threads.messages.list(threadId);
-  for (const msg of messages.data) {
-    if ('text' in msg.content[0] && msg.role === 'assistant') {
-      return msg.content[0].text.value;
-    }
-  }
-  throw new Error('Unable to retrieve response from assistant');
-}
-
-// Run with streaming enabled
-export async function createAndHandleRunWithStreaming(
-  threadID: any, assistantID: any, instructions: string, callback?: (response: string) => void
-): Promise<string> {
-  let completionText = '';
-
-  return new Promise((resolve, reject) => {
-    keys.openai.beta.threads.runs.stream(threadID, {
-      'assistant_id': assistantID,
-      instructions
-    })
-      .on('textDelta', (textDelta: { value: string }) => {
-        if (textDelta && textDelta.value) {
-          completionText += textDelta.value;
-          if (callback) {
-            callback(textDelta.value);
-          }
-        }
-      })
-      .on('toolCallDelta', (toolCallDelta: { type: string; code_interpreter: { input: string; outputs: any[] } }) => {
-        if (toolCallDelta.type === 'code_interpreter') {
-          if (toolCallDelta && toolCallDelta.code_interpreter && toolCallDelta.code_interpreter.input) {
-            completionText += toolCallDelta.code_interpreter.input;
-            if (callback) {
-              callback(toolCallDelta.code_interpreter.input);
-            }
-          }
-          if (toolCallDelta && toolCallDelta.code_interpreter && toolCallDelta.code_interpreter.outputs) {
-            toolCallDelta.code_interpreter.outputs.forEach((output) => {
-              if (output.type === 'logs' && output.logs) {
-                completionText += output.logs;
-                if (callback) {
-                  callback(output.logs);
-                }
-              }
-            });
-          }
-        }
-      })
-      .on('end', () => resolve(completionText))
-      .on('error', reject);
-  });
-}
diff --git a/chatapi/src/utils/chat-helpers.utils.ts b/chatapi/src/utils/chat-helpers.utils.ts
index e6bda46965..159c6266c1 100644
--- a/chatapi/src/utils/chat-helpers.utils.ts
+++ b/chatapi/src/utils/chat-helpers.utils.ts
@@ -1,16 +1,7 @@
 import { models } from '../config/ai-providers.config';
-import { AIProvider, ChatMessage } from '../models/chat.model';
+import { AIProvider, ChatMessage, ChatResponse } from '../models/chat.model';
 import { Attachment } from '../models/db-doc.model';
 import { fetchFileFromCouchDB } from './db.utils';
-import {
-  createAssistant,
-  createThread,
-  addToThread,
-  createRun,
-  waitForRunCompletion,
-  retrieveResponse,
-  createAndHandleRunWithStreaming,
-} from './chat-assistant.utils';
 import { extractTextFromDocument } from './text-extraction.utils';

 /**
@@ -22,8 +13,7 @@ import { extractTextFromDocument } from './text-extraction.utils';
 export async function aiChatStream(
   messages: ChatMessage[],
   aiProvider: AIProvider,
-  assistant: boolean,
-  context: any = '',
+  context: any,
   callback?: (response: string) => void
 ): Promise<string> {
   const provider = models[aiProvider.name];
@@ -32,30 +22,15 @@ export async function aiChatStream(
   }
   const model = aiProvider.model ?? provider.defaultModel;

-  if (assistant) {
-    try {
-      const asst = await createAssistant(model);
-      const thread = await createThread();
-      for (const message of messages) {
-        await addToThread(thread.id, message.content);
-      }
-
-      const completionText = await createAndHandleRunWithStreaming(thread.id, asst.id, context.data, callback);
-
-      return completionText;
-    } catch (error) {
-      throw new Error(`Error processing request ${error}`);
-    }
-  }
-
-  const completion = await provider.ai.chat.completions.create({
+  const stream = await provider.ai.responses.create({
     model,
-    messages,
+    'instructions': context.data || '',
+    'input': messages,
     'stream': true,
   });

   let completionText = '';
-  for await (const chunk of completion) {
+  for await (const chunk of stream) {
     if (chunk.choices && chunk.choices.length > 0) {
       const response = chunk.choices[0].delta?.content || '';
       completionText += response;
@@ -78,9 +53,8 @@ export async function aiChatStream(
 export async function aiChatNonStream(
   messages: ChatMessage[],
   aiProvider: AIProvider,
-  assistant: boolean,
-  context: any = '',
-): Promise<string> {
+  context: any,
+): Promise<ChatResponse> {
   const provider = models[aiProvider.name];
   if (!provider) {
     throw new Error('Unsupported AI provider');
@@ -100,31 +74,19 @@ export async function aiChatNonStream(
     }
   }

-  if (assistant) {
-    try {
-      const asst = await createAssistant(model);
-      const thread = await createThread();
-      for (const message of messages) {
-        await addToThread(thread.id, message.content);
-      }
-      const run = await createRun(thread.id, asst.id, context.data);
-      await waitForRunCompletion(thread.id, run.id);
-
-      return await retrieveResponse(thread.id);
-    } catch (error) {
-      throw new Error(`Error processing request ${error}`);
-    }
-  }
-
-  const completion = await provider.ai.chat.completions.create({
+  const response = await provider.ai.responses.create({
     model,
-    messages,
+    'instructions': context.data || '',
+    'input': messages,
   });

-  const completionText = completion.choices[0]?.message?.content;
-  if (!completionText) {
+  const responseText = response.output_text;
+  if (!responseText) {
     throw new Error('Unexpected API response');
   }

-  return completionText;
+  return {
+    'responseId': response.id,
+    'message': response.output_text
+  };
 }
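For orientation, a minimal sketch of the Responses API calls this file now relies on, using the official `openai` Node SDK. The model name is illustrative, and the streaming event names are as documented for recent SDK releases; worth verifying against the installed version.

```typescript
import OpenAI from 'openai';

const client = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });

// Non-streaming: `output_text` aggregates the text output and `id` is what
// ChatResponse.responseId stores.
async function once() {
  const response = await client.responses.create({
    'model': 'gpt-4o-mini', // illustrative model name
    'instructions': 'You are a helpful assistant.',
    'input': [ { 'role': 'user', 'content': 'Hello' } ]
  });
  return { 'responseId': response.id, 'message': response.output_text };
}

// Streaming: the Responses API emits typed events such as 'response.output_text.delta'
// rather than chat-completion style `choices` chunks.
async function streamed(onDelta: (chunk: string) => void) {
  const stream = await client.responses.create({
    'model': 'gpt-4o-mini',
    'input': [ { 'role': 'user', 'content': 'Hello' } ],
    'stream': true
  });
  let text = '';
  for await (const event of stream) {
    if (event.type === 'response.output_text.delta') {
      text += event.delta;
      onDelta(event.delta);
    }
  }
  return text;
}
```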
diff --git a/chatapi/src/utils/chat.utils.ts b/chatapi/src/utils/chat.utils.ts
index 0e70507a6a..34c66f2a0f 100644
--- a/chatapi/src/utils/chat.utils.ts
+++ b/chatapi/src/utils/chat.utils.ts
@@ -1,17 +1,16 @@
 import { aiChatStream, aiChatNonStream } from './chat-helpers.utils';
-import { AIProvider, ChatMessage } from '../models/chat.model';
+import { AIProvider, ChatMessage, ChatResponse } from '../models/chat.model';

 export async function aiChat(
   messages: ChatMessage[],
   aiProvider: AIProvider,
-  assistant: boolean,
   context?: any,
   stream?: boolean,
   callback?: (response: string) => void
-): Promise<string> {
+): Promise<string | ChatResponse> {
   if (stream) {
-    return await aiChatStream(messages, aiProvider, assistant, context, callback);
+    return await aiChatStream(messages, aiProvider, context, callback);
   } else {
-    return await aiChatNonStream(messages, aiProvider, assistant, context);
+    return await aiChatNonStream(messages, aiProvider, context);
   }
 }
diff --git a/package.json b/package.json
index 0e7fb42727..1fd4c57dcf 100755
--- a/package.json
+++ b/package.json
@@ -54,6 +54,7 @@
     "material-icons": "^0.3.1",
     "mime": "4.0.0",
     "ngx-image-cropper": "^3.2.1",
+    "openai": "^6.3.0",
     "pdfmake": "^0.1.63",
     "pouchdb": "^7.1.1",
     "pouchdb-authentication": "^1.1.3",
diff --git a/src/app/chat/chat-sidebar/chat-sidebar.component.ts b/src/app/chat/chat-sidebar/chat-sidebar.component.ts
index ac1260cb8c..7948d9942f 100644
--- a/src/app/chat/chat-sidebar/chat-sidebar.component.ts
+++ b/src/app/chat/chat-sidebar/chat-sidebar.component.ts
@@ -158,7 +158,7 @@ export class ChatSidebarComponent implements OnInit, OnDestroy {
       .subscribe(
         (conversations: any) => {
           this.conversations = conversations
-            .filter((conversation) => !conversation?.context)
+            .filter((conversation) => !conversation?.context?.resource)
             .sort((a, b) => {
               const dateA = a.updatedDate || a.createdDate;
               const dateB = b.updatedDate || b.createdDate;
@@ -166,9 +166,9 @@ export class ChatSidebarComponent implements OnInit, OnDestroy {
               return dateB - dateA;
             });
           this.filteredConversations = [ ...this.conversations ];
-          if (newChat) {
-            this.selectConversation(this.filteredConversations[0], 0);
-          }
+          // if (newChat) {
+          //   this.selectConversation(this.filteredConversations[0], 0);
+          // }
           this.initializeFormGroups();
         },
         (error) => console.log(error)
diff --git a/src/app/chat/chat-window/chat-window.component.ts b/src/app/chat/chat-window/chat-window.component.ts
index 7063084cb0..6de94935d1 100644
--- a/src/app/chat/chat-window/chat-window.component.ts
+++ b/src/app/chat/chat-window/chat-window.component.ts
@@ -40,8 +40,10 @@ export class ChatWindowComponent implements OnInit, OnDestroy, AfterViewInit {
     user: this.userService.get().name,
     content: '',
     aiProvider: { name: 'openai' },
-    assistant: false,
-    context: '',
+    context: {
+      data: '',
+      resource: null
+    },
   };
   providers: AIProvider[] = [];
   trackByFn = trackByIdVal;
@@ -244,17 +246,16 @@ export class ChatWindowComponent implements OnInit, OnDestroy, AfterViewInit {
     this.setSelectedConversation();

     if (this.context) {
-      this.data.assistant = true;
       this.data.context = this.context;
     }

     if (this.streaming) {
-      this.conversations.push({ id: Date.now().toString(), role: 'user', query: content, response: '' });
+      this.conversations.push({ role: 'user', query: content, response: '' });
       this.chatService.sendUserInput(this.data);
     } else {
       this.chatService.getPrompt(this.data, true).subscribe(
         (completion: any) => {
-          this.conversations.push({ id: Date.now().toString(), query: content, response: completion?.chat });
+          this.conversations.push({ id: completion?.chat.responseId, query: content, response: completion?.chat.message });
           this.selectedConversationId = {
             '_id': completion.couchDBResponse?.id,
             '_rev': completion.couchDBResponse?.rev
@@ -262,7 +263,7 @@ export class ChatWindowComponent implements OnInit, OnDestroy, AfterViewInit {
           this.postSubmit();
         },
         (error: any) => {
-          this.conversations.push({ id: Date.now().toString(), query: content, response: 'Error: ' + error.message, error: true });
+          this.conversations.push({ query: content, response: 'Error: ' + error.message, error: true });
           this.spinnerOn = true;
           this.promptForm.controls.prompt.setValue('');
         }
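Illustrative sketch only (field names follow the patch; the resource document is hypothetical): how the new `context` object is shaped before `chat-window.component.ts` hands it to the chat service, and why the sidebar filter above checks `context?.resource`.

```typescript
interface Context {
  data: string;  // free-text instructions forwarded to the model
  resource: any; // non-null marks the conversation as resource-scoped
}

function buildContext(resourceDoc: any, summary: string): Context {
  return { data: summary, resource: resourceDoc };
}

// A resource-scoped conversation: excluded from the sidebar list because
// `context.resource` is truthy.
const chatData = {
  user: 'admin',
  content: 'What does this resource cover?',
  aiProvider: { name: 'openai' },
  context: buildContext({ _id: 'resource-123' }, 'Resource summary text')
};
```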
diff --git a/src/app/chat/chat.model.ts b/src/app/chat/chat.model.ts
index 28c4b9c883..3886bf88a3 100644
--- a/src/app/chat/chat.model.ts
+++ b/src/app/chat/chat.model.ts
@@ -1,11 +1,15 @@
+interface Context {
+  data: string;
+  resource: any;
+};
+
 export interface ConversationForm {
   _id: string;
   _rev: string;
   user: string;
   content: string;
   aiProvider: AIProvider;
-  assistant: boolean;
-  context: string;
+  context: Context;
 }

 export interface Conversation {
@@ -16,7 +20,7 @@ export interface Conversation {
   title: string;
   createdDate: number;
   updatedDate: number;
-  context?: any;
+  context: Context;
 }

 export interface Message {
diff --git a/src/app/manager-dashboard/reports/reports-detail-activities.component.html b/src/app/manager-dashboard/reports/reports-detail-activities.component.html
index 5eaa1b3e54..9884b54602 100644
--- a/src/app/manager-dashboard/reports/reports-detail-activities.component.html
+++ b/src/app/manager-dashboard/reports/reports-detail-activities.component.html
@@ -58,10 +58,6 @@
       Chat Responses
       {{element?.conversationLength}}
-
-      Assistant
-      {{element?.assistant}}
-
       Shared
       {{element?.shared}}
diff --git a/src/app/manager-dashboard/reports/reports-detail-activities.component.ts b/src/app/manager-dashboard/reports/reports-detail-activities.component.ts
index 56e4b94765..69f736c46c 100644
--- a/src/app/manager-dashboard/reports/reports-detail-activities.component.ts
+++ b/src/app/manager-dashboard/reports/reports-detail-activities.component.ts
@@ -10,7 +10,7 @@ const columns = {
   resources: [ 'title', 'count', 'averageRating' ],
   courses: [ 'title', 'steps', 'exams', 'enrollments', 'count', 'stepsCompleted', 'completions', 'averageRating' ],
   health: [ 'weekOf', 'count', 'unique' ],
-  chat: [ 'aiProvider', 'user', 'createdDate', 'conversationLength', 'assistant', 'shared' ]
+  chat: [ 'aiProvider', 'user', 'createdDate', 'conversationLength', 'shared' ]
 };

 @Component({
@@ -56,7 +56,6 @@ export class ReportsDetailActivitiesComponent implements OnInit, OnChanges, Afte
       ...activity,
       createdDate: new Date(activity.createdDate).getTime(),
       hasAttachments: activity.context?.resource?.attachments ? $localize`True` : '',
-      assistant: activity.assistant ? $localize`True` : '',
       shared: activity.shared ? $localize`True` : '',
       conversationLength: activity?.conversations?.length || 0
     }));
diff --git a/src/app/manager-dashboard/reports/reports-detail.component.ts b/src/app/manager-dashboard/reports/reports-detail.component.ts
index bf58cb3002..e414a1a49d 100644
--- a/src/app/manager-dashboard/reports/reports-detail.component.ts
+++ b/src/app/manager-dashboard/reports/reports-detail.component.ts
@@ -738,7 +738,6 @@ export class ReportsDetailComponent implements OnInit, OnDestroy {
       [$localize`AI Provider`]: activity.aiProvider || '',
       [$localize`Timestamp`]: new Date(activity.createdDate).toLocaleString(),
      [$localize`Chat Responses`]: activity.conversations?.length || 0,
-      [$localize`Assistant`]: activity.assistant ? 'Yes' : 'No',
       [$localize`Shared`]: activity.shared ? 'Yes' : 'No',
       [$localize`Has Attachments`]: activity.context?.resource?.attachments?.length > 0 ? 'Yes' : 'No'
     }));
diff --git a/src/app/submissions/submissions.service.ts b/src/app/submissions/submissions.service.ts
index 97f2097a2d..e825ff84b7 100644
--- a/src/app/submissions/submissions.service.ts
+++ b/src/app/submissions/submissions.service.ts
@@ -768,8 +768,7 @@ export class SubmissionsService {
       response = await this.chatService.getPrompt(
         {
           content: surveyAnalysisPrompt(exam.type, exam.name, exam.description, payloadString),
-          aiProvider: { name: 'openai' },
-          assistant: false
+          aiProvider: { name: 'openai' }
         },
         false
       ).toPromise();