Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
74 changes: 40 additions & 34 deletions src/handlers/chatCompletionsHandler.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import { RouterError } from '../errors/RouterError';
import { endpointStrings } from '../providers/types';
import {
constructConfigFromRequestHeaders,
tryTargetsRecursively,
Expand All @@ -13,43 +14,48 @@ import { Context } from 'hono';
* @throws Will throw an error if no provider options can be determined or if the request to the provider(s) fails.
* @throws Will throw an 500 error if the handler fails due to some reasons
*/
export async function chatCompletionsHandler(c: Context): Promise<Response> {
try {
let request = await c.req.json();
let requestHeaders = Object.fromEntries(c.req.raw.headers);
const camelCaseConfig = constructConfigFromRequestHeaders(requestHeaders);
const tryTargetsResponse = await tryTargetsRecursively(
c,
camelCaseConfig ?? {},
request,
requestHeaders,
'chatComplete',
'POST',
'config'
);

return tryTargetsResponse;
} catch (err: any) {
console.log('chatCompletion error', err.message);
let statusCode = 500;
let errorMessage = 'Something went wrong';
export function chatCompletionsHandler(endpoint: endpointStrings) {
async function handler(c: Context): Promise<Response> {
try {
const method = c.req.method;
let request = method === 'POST' ? await c.req.json() : {};
let requestHeaders = Object.fromEntries(c.req.raw.headers);
const camelCaseConfig = constructConfigFromRequestHeaders(requestHeaders);
const tryTargetsResponse = await tryTargetsRecursively(
c,
camelCaseConfig ?? {},
request,
requestHeaders,
endpoint,
method,
'config'
);

if (err instanceof RouterError) {
statusCode = 400;
errorMessage = err.message;
}
return tryTargetsResponse;
} catch (err: any) {
console.log(`${endpoint} error: ${err.message}`);
let statusCode = 500;
let errorMessage = 'Something went wrong';

return new Response(
JSON.stringify({
status: 'failure',
message: errorMessage,
}),
{
status: statusCode,
headers: {
'content-type': 'application/json',
},
if (err instanceof RouterError) {
statusCode = 400;
errorMessage = err.message;
}
);

return new Response(
JSON.stringify({
status: 'failure',
message: errorMessage,
}),
{
status: statusCode,
headers: {
'content-type': 'application/json',
},
}
);
}
}
return handler;
}
33 changes: 31 additions & 2 deletions src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -119,7 +119,36 @@ app.onError((err, c) => {
* POST route for '/v1/chat/completions'.
* Handles requests by passing them to the chatCompletionsHandler.
*/
app.post('/v1/chat/completions', requestValidator, chatCompletionsHandler);
app.post(
'/v1/chat/completions',
requestValidator,
chatCompletionsHandler('chatComplete')
);
app.get(
'/v1/chat/completions',
requestValidator,
chatCompletionsHandler('listChatCompletions')
);
app.get(
'/v1/chat/completions/:completionId',
requestValidator,
chatCompletionsHandler('getChatCompletion')
);
app.get(
'/v1/chat/completions/:completionId/messages',
requestValidator,
chatCompletionsHandler('getChatCompletionMessages')
);
app.post(
'/v1/chat/completions/:completionId',
requestValidator,
chatCompletionsHandler('updateChatCompletion')
);
app.delete(
'/v1/chat/completions/:completionId',
requestValidator,
chatCompletionsHandler('deleteChatCompletion')
);

/**
* POST route for '/v1/completions'.
Expand Down Expand Up @@ -211,7 +240,7 @@ app.all(
*/
app.post('/v1/prompts/*', requestValidator, (c) => {
if (c.req.url.endsWith('/v1/chat/completions')) {
return chatCompletionsHandler(c);
return chatCompletionsHandler('chatComplete')(c);
} else if (c.req.url.endsWith('/v1/completions')) {
return completionsHandler(c);
}
Expand Down
45 changes: 30 additions & 15 deletions src/providers/azure-openai/api.ts
Original file line number Diff line number Diff line change
Expand Up @@ -54,7 +54,7 @@ const AzureOpenAIAPIConfig: ProviderAPIConfig = {
}
return headersObj;
},
getEndpoint: ({ providerOptions, fn, gatewayRequestURL }) => {
getEndpoint: ({ providerOptions, fn, gatewayRequestURL, c }) => {
const { apiVersion, urlToFetch, deploymentId } = providerOptions;
let mappedFn = fn;

Expand All @@ -76,7 +76,12 @@ const AzureOpenAIAPIConfig: ProviderAPIConfig = {
}
}

const path = gatewayRequestURL.split('/v1')?.[1];
const url = new URL(gatewayRequestURL);
if (apiVersion) {
url.searchParams.set('api-version', apiVersion);
}
const path = url.pathname;
const searchParams = url.searchParams.toString();

switch (mappedFn) {
case 'complete': {
Expand Down Expand Up @@ -104,31 +109,41 @@ const AzureOpenAIAPIConfig: ProviderAPIConfig = {
return `/realtime?api-version=${apiVersion}&deployment=${deploymentId}`;
}
case 'uploadFile':
return `${path}?api-version=${apiVersion}`;
return `${path}?${searchParams}`;
case 'retrieveFile':
return `${path}?api-version=${apiVersion}`;
return `${path}?${searchParams}`;
case 'listFiles':
return `${path}?api-version=${apiVersion}`;
return `${path}?${searchParams}`;
case 'deleteFile':
return `${path}?api-version=${apiVersion}`;
return `${path}?${searchParams}`;
case 'retrieveFileContent':
return `${path}?api-version=${apiVersion}`;
return `${path}?${searchParams}`;
case 'createFinetune':
return `${path}?api-version=${apiVersion}`;
return `${path}?${searchParams}`;
case 'retrieveFinetune':
return `${path}?api-version=${apiVersion}`;
return `${path}?${searchParams}`;
case 'listFinetunes':
return `${path}?api-version=${apiVersion}`;
return `${path}?${searchParams}`;
case 'cancelFinetune':
return `${path}?api-version=${apiVersion}`;
return `${path}?${searchParams}`;
case 'createBatch':
return `${path}?api-version=${apiVersion}`;
return `${path}?${searchParams}`;
case 'retrieveBatch':
return `${path}?api-version=${apiVersion}`;
return `${path}?${searchParams}`;
case 'cancelBatch':
return `${path}?api-version=${apiVersion}`;
return `${path}?${searchParams}`;
case 'listBatches':
return `${path}?api-version=${apiVersion}`;
return `${path}?${searchParams}`;
case 'listChatCompletions':
return `/deployments/${deploymentId}/${path}?${searchParams}`;
case 'getChatCompletion':
return `/deployments/${deploymentId}/${path}?${searchParams}`;
case 'getChatCompletionMessages':
return `/deployments/${deploymentId}/${path}?${searchParams}`;
case 'updateChatCompletion':
return `/deployments/${deploymentId}/${path}?${searchParams}`;
case 'deleteChatCompletion':
return `/deployments/${deploymentId}/${path}?${searchParams}`;
default:
return '';
}
Expand Down
7 changes: 7 additions & 0 deletions src/providers/azure-openai/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@ import { OpenAIFileUploadRequestTransform } from '../openai/uploadFile';
import { AzureOpenAIFinetuneResponseTransform } from './utils';
import { AzureOpenAICreateBatchConfig } from './createBatch';
import { AzureOpenAIGetBatchOutputRequestHandler } from './getBatchOutput';
import { OpenAIUpdateChatCompletionConfig } from '../openai/updateChatCompletions';

const AzureOpenAIConfig: ProviderConfigs = {
complete: AzureOpenAICompleteConfig,
Expand All @@ -40,6 +41,7 @@ const AzureOpenAIConfig: ProviderConfigs = {
createTranscription: {},
createTranslation: {},
realtime: {},
updateChatCompletion: OpenAIUpdateChatCompletionConfig,
cancelFinetune: {},
cancelBatch: {},
createBatch: AzureOpenAICreateBatchConfig,
Expand All @@ -60,6 +62,11 @@ const AzureOpenAIConfig: ProviderConfigs = {
retrieveFile: AzureOpenAIResponseTransform,
deleteFile: AzureOpenAIResponseTransform,
retrieveFileContent: AzureOpenAIResponseTransform,
listChatCompletions: AzureOpenAIResponseTransform,
getChatCompletion: AzureOpenAIResponseTransform,
getChatCompletionMessages: AzureOpenAIResponseTransform,
updateChatCompletion: AzureOpenAIResponseTransform,
deleteChatCompletion: AzureOpenAIResponseTransform,
createFinetune: AzureOpenAIResponseTransform,
retrieveFinetune: AzureOpenAIFinetuneResponseTransform,
createBatch: AzureOpenAIResponseTransform,
Expand Down
10 changes: 10 additions & 0 deletions src/providers/openai/api.ts
Original file line number Diff line number Diff line change
Expand Up @@ -72,6 +72,16 @@ const OpenAIAPIConfig: ProviderAPIConfig = {
return basePath;
case 'listBatches':
return basePath;
case 'listChatCompletions':
return basePath;
case 'getChatCompletion':
return basePath;
case 'getChatCompletionMessages':
return basePath;
case 'updateChatCompletion':
return basePath;
case 'deleteChatCompletion':
return basePath;
default:
return '';
}
Expand Down
17 changes: 17 additions & 0 deletions src/providers/openai/deleteChatCompletions.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
import { OPEN_AI } from '../../globals';
import { ErrorResponse, DeleteChatCompletionResponse } from '../types';
import { OpenAIErrorResponseTransform } from './utils';

/**
 * Response transform for OpenAI's DELETE /chat/completions/{id} endpoint.
 *
 * Successful payloads are forwarded untouched; a non-200 response carrying
 * an `error` field is normalized into the gateway's unified error shape via
 * `OpenAIErrorResponseTransform`.
 */
export const OpenAIDeleteChatCompletionResponseTransform = (
  response: DeleteChatCompletionResponse | ErrorResponse,
  responseStatus: number
): DeleteChatCompletionResponse | ErrorResponse => {
  // Happy path: success status, or a failure body with no `error` field
  // that we cannot interpret — pass through unchanged either way.
  if (responseStatus === 200 || !('error' in response)) {
    return response;
  }
  return OpenAIErrorResponseTransform(response, OPEN_AI);
};
18 changes: 18 additions & 0 deletions src/providers/openai/getChatCompletion.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
import { OPEN_AI } from '../../globals';
import { ErrorResponse } from '../types';
import { OpenAIChatCompleteResponse } from './chatComplete';
import { OpenAIErrorResponseTransform } from './utils';

/**
 * Response transform for OpenAI's GET /chat/completions/{id} endpoint.
 *
 * Maps provider error payloads (non-200 status with an `error` field) to the
 * gateway's unified error format; any other payload is returned as-is.
 */
export const OpenAIGetChatCompletionResponseTransform = (
  response: OpenAIChatCompleteResponse | ErrorResponse,
  responseStatus: number
): OpenAIChatCompleteResponse | ErrorResponse => {
  const failed = responseStatus !== 200;
  if (failed && 'error' in response) {
    return OpenAIErrorResponseTransform(response, OPEN_AI);
  }
  return response;
};
14 changes: 14 additions & 0 deletions src/providers/openai/getChatMessages.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
import { OPEN_AI } from '../../globals';
import { ErrorResponse, GetChatMessagesResponse } from '../types';
import { OpenAIErrorResponseTransform } from './utils';

/**
 * Response transform for OpenAI's GET /chat/completions/{id}/messages endpoint.
 *
 * Non-200 responses that include an `error` field are converted to the
 * gateway's unified error shape; everything else passes through unchanged.
 */
export const OpenAIGetChatMessagesResponseTransform = (
  response: GetChatMessagesResponse | ErrorResponse,
  responseStatus: number
): GetChatMessagesResponse | ErrorResponse =>
  responseStatus !== 200 && 'error' in response
    ? OpenAIErrorResponseTransform(response, OPEN_AI)
    : response;
14 changes: 14 additions & 0 deletions src/providers/openai/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,14 @@ import { OpenAIRetrieveBatchResponseTransform } from './retrieveBatch';
import { OpenAICancelBatchResponseTransform } from './cancelBatch';
import { OpenAIListBatchesResponseTransform } from './listBatches';
import { OpenAIGetBatchOutputRequestHandler } from './getBatchOutput';
import { OpenAIListChatCompletionsResponseTransform } from './listChatCompletions';
import { OpenAIGetChatMessagesResponseTransform } from './getChatMessages';
import { OpenAIDeleteChatCompletionResponseTransform } from './deleteChatCompletions';
import { OpenAIGetChatCompletionResponseTransform } from './getChatCompletion';
import {
OpenAIUpdateChatCompletionResponseTransform,
OpenAIUpdateChatCompletionConfig,
} from './updateChatCompletions';
import {
OpenAICreateFinetuneConfig,
OpenAIFinetuneResponseTransform,
Expand All @@ -52,6 +60,7 @@ const OpenAIConfig: ProviderConfigs = {
createBatch: OpenAICreateBatchConfig,
createFinetune: OpenAICreateFinetuneConfig,
cancelBatch: {},
updateChatCompletion: OpenAIUpdateChatCompletionConfig,
cancelFinetune: {},
requestHandlers: {
getBatchOutput: OpenAIGetBatchOutputRequestHandler,
Expand Down Expand Up @@ -79,6 +88,11 @@ const OpenAIConfig: ProviderConfigs = {
retrieveBatch: OpenAIRetrieveBatchResponseTransform,
cancelBatch: OpenAICancelBatchResponseTransform,
listBatches: OpenAIListBatchesResponseTransform,
listChatCompletions: OpenAIListChatCompletionsResponseTransform,
getChatCompletion: OpenAIGetChatCompletionResponseTransform,
getChatCompletionMessages: OpenAIGetChatMessagesResponseTransform,
updateChatCompletion: OpenAIUpdateChatCompletionResponseTransform,
deleteChatCompletion: OpenAIDeleteChatCompletionResponseTransform,
createFinetune: OpenAIFinetuneResponseTransform,
retrieveFinetune: OpenAIFinetuneResponseTransform,
},
Expand Down
18 changes: 18 additions & 0 deletions src/providers/openai/listChatCompletions.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
import { OPEN_AI } from '../../globals';
import { ErrorResponse } from '../types';
import { OpenAIChatCompleteResponse } from './chatComplete';
import { OpenAIErrorResponseTransform } from './utils';

/**
 * Response transform for OpenAI's GET /chat/completions (list) endpoint.
 *
 * Successful payloads are returned verbatim. A failing response (non-200)
 * is normalized via `OpenAIErrorResponseTransform` only when it carries an
 * `error` field; otherwise it is forwarded as-is.
 */
export const OpenAIListChatCompletionsResponseTransform = (
  response: OpenAIChatCompleteResponse[] | ErrorResponse,
  responseStatus: number
): OpenAIChatCompleteResponse[] | ErrorResponse => {
  if (responseStatus === 200) {
    return response;
  }
  if ('error' in response) {
    return OpenAIErrorResponseTransform(response, OPEN_AI);
  }
  return response;
};
25 changes: 25 additions & 0 deletions src/providers/openai/updateChatCompletions.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
import { OPEN_AI } from '../../globals';
import { ErrorResponse, ProviderConfig } from '../types';
import { OpenAIChatCompleteResponse } from './chatComplete';
import { OpenAIErrorResponseTransform } from './utils';

/**
 * Request-body config for OpenAI's update-chat-completion endpoint
 * (POST /chat/completions/{completion_id}).
 *
 * The gateway forwards a single body field, `metadata`, and marks it
 * required — per the OpenAI API, updating a stored chat completion only
 * accepts a `metadata` object.
 */
export const OpenAIUpdateChatCompletionConfig: ProviderConfig = {
  metadata: {
    param: 'metadata',
    required: true,
  },
};

/**
 * Response transform for OpenAI's POST /chat/completions/{id} (update)
 * endpoint.
 *
 * Error payloads — a non-200 status whose body contains an `error` field —
 * are rewritten into the gateway's unified error shape; all other payloads
 * are passed through untouched.
 */
export const OpenAIUpdateChatCompletionResponseTransform = (
  response: OpenAIChatCompleteResponse | ErrorResponse,
  responseStatus: number
): OpenAIChatCompleteResponse | ErrorResponse => {
  // Guard: nothing to normalize on success or when no `error` field exists.
  if (responseStatus === 200 || !('error' in response)) {
    return response;
  }
  return OpenAIErrorResponseTransform(response, OPEN_AI);
};
Loading
Loading