Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

AI provider: Rename EdgeDB to Gel #1150

Open
wants to merge 1 commit into
base: gel
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
29 changes: 16 additions & 13 deletions packages/vercel-ai-provider/README.md
Original file line number Diff line number Diff line change
@@ -1,37 +1,40 @@
# Provider for the Vercel AI SDK

The provider for the [Vercel AI SDK](https://sdk.vercel.ai/docs) contains language model support for
the OpenAi, Mistral and Anthropic chat and completion APIs that implements EdgeDB RAG, and embedding model support for the OpenAI and Mistral embeddings API.
The provider for the [Vercel AI SDK](https://sdk.vercel.ai/docs) contains
language model support for the OpenAI, Mistral and Anthropic chat and completion
APIs that implements Gel RAG, and embedding model support for the OpenAI and
Mistral embeddings API.

## Setup

Provider is available in the `@edgedb/vercel-ai-provider` module. You can install it with:
The provider is available in the `@gel/vercel-ai-provider` module. You can
install it with:

```bash
npm i @edgedb/vercel-ai-provider
npm i @gel/vercel-ai-provider
```

## Provider Instance

You can import the default provider instance `edgedb` from `@edgedb/vercel-ai-provider`:
You can import the default provider instance `gel` from `@gel/vercel-ai-provider`:

```ts
import { edgedb } from "@edgedb/vercel-ai-provider";
import { gel } from "@gel/vercel-ai-provider";
```

## Example

```ts
import { generateText } from "ai";
import { createClient } from "edgedb";
import { edgedb } from "@edgedb/vercel-ai-provider";
import { createClient } from "gel";
import { gel } from "@gel/vercel-ai-provider";

const textModel = (await edgedb).languageModel("gpt-4-turbo-preview");
const textModel = (await gel).languageModel("gpt-4-turbo-preview", {
context: { query: "your context" },
});

const { text } = await generateText({
model: textModel.withSettings({
context: { query: "your context" },
}),
model: textModel,
prompt: "your prompt",
});

Expand All @@ -40,4 +43,4 @@ console.log(text);

## Documentation

Please check out the **[EdgeDB provider documentation](https://docs.edgedb.com/ai/vercel-ai-provider)** for more information.
Please check out the **[Gel provider documentation](https://docs.gel.com/ai/vercel_ai_sdk_provider)** for more information.
6 changes: 3 additions & 3 deletions packages/vercel-ai-provider/package.json
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
{
"name": "@edgedb/vercel-ai-provider",
"name": "@gel/vercel-ai-provider",
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Add repository info:

  "repository": {
    "type": "git",
    "url": "https://github.com/gel/gel-js.git",
    "directory": "packages/vercel-ai-provider"
  },

"description": "Provider for the Vercel AI SDK",
"version": "0.0.1",
"license": "Apache-2.0",
Expand Down Expand Up @@ -36,11 +36,11 @@
"tsup": "^8",
"typescript": "5.5.4",
"zod": "3.23.8",
"edgedb": "*"
"gel": "*"
},
"peerDependencies": {
"zod": "^3.0.0",
"edgedb": "^1.5.0"
"gel": "^1.5.0"
},
"engines": {
"node": ">=18"
Expand Down
Original file line number Diff line number Diff line change
@@ -1,10 +1,10 @@
import { type LanguageModelV1Prompt } from "@ai-sdk/provider";
import type { EdgeDBMessage } from "./edgedb-chat-settings";
import type { GelMessage } from "./gel-chat-settings";

export function convertToEdgeDBMessages(
export function convertToGelMessages(
prompt: LanguageModelV1Prompt,
): EdgeDBMessage[] {
const messages: EdgeDBMessage[] = [];
): GelMessage[] {
const messages: GelMessage[] = [];

for (const { role, content } of prompt) {
switch (role) {
Expand Down
113 changes: 0 additions & 113 deletions packages/vercel-ai-provider/src/edgedb-provider.ts

This file was deleted.

Original file line number Diff line number Diff line change
Expand Up @@ -17,41 +17,41 @@ import {
combineHeaders,
} from "@ai-sdk/provider-utils";
import {
type EdgeDBChatModelId,
type EdgeDBChatSettings,
type EdgeDBMessage,
type GelChatModelId,
type GelChatSettings,
type GelMessage,
isAnthropicModel,
} from "./edgedb-chat-settings";
import { edgedbFailedResponseHandler } from "./edgedb-error";
} from "./gel-chat-settings";
import { gelFailedResponseHandler } from "./gel-error";
import {
mapEdgedbStopReason,
getResponseMetadata,
mapOpenAICompletionLogProbs,
} from "./utils";
import { convertToEdgeDBMessages } from "./convert-to-edgedb-messages";
import { prepareTools } from "./edgedb-prepare-tools";
import { convertToGelMessages } from "./convert-to-gel-messages";
import { prepareTools } from "./gel-prepare-tools";

export interface EdgeDBChatConfig {
export interface GelChatConfig {
provider: string;
fetch: FetchFunction;
// baseURL: string | null;
headers: () => Record<string, string | undefined>;
}

export class EdgeDBChatLanguageModel implements LanguageModelV1 {
export class GelChatLanguageModel implements LanguageModelV1 {
readonly specificationVersion = "v1";
readonly defaultObjectGenerationMode = "json";
readonly supportsImageUrls = false;

readonly modelId: EdgeDBChatModelId;
readonly settings: EdgeDBChatSettings;
readonly modelId: GelChatModelId;
readonly settings: GelChatSettings;

private readonly config: EdgeDBChatConfig;
private readonly config: GelChatConfig;

constructor(
modelId: EdgeDBChatModelId,
settings: EdgeDBChatSettings,
config: EdgeDBChatConfig,
modelId: GelChatModelId,
settings: GelChatSettings,
config: GelChatConfig,
) {
this.modelId = modelId;
this.settings = settings;
Expand Down Expand Up @@ -139,7 +139,7 @@ export class EdgeDBChatLanguageModel implements LanguageModelV1 {

const baseArgs = {
model: this.modelId,
messages: convertToEdgeDBMessages(prompt),
messages: convertToGelMessages(prompt),
temperature,
max_tokens: maxTokens,
top_p: topP,
Expand Down Expand Up @@ -182,7 +182,7 @@ export class EdgeDBChatLanguageModel implements LanguageModelV1 {
}
}

private buildPrompt(messages: EdgeDBMessage[]) {
private buildPrompt(messages: GelMessage[]) {
const providedPromptId =
this.settings.prompt &&
("name" in this.settings.prompt || "id" in this.settings.prompt);
Expand All @@ -201,7 +201,7 @@ export class EdgeDBChatLanguageModel implements LanguageModelV1 {
};
}

private buildQuery(messages: EdgeDBMessage[]) {
private buildQuery(messages: GelMessage[]) {
return [...messages].reverse().find((msg) => msg.role === "user")!
.content[0].text;
}
Expand All @@ -223,10 +223,9 @@ export class EdgeDBChatLanguageModel implements LanguageModelV1 {
query: this.buildQuery(messages),
stream: false,
},
failedResponseHandler: edgedbFailedResponseHandler,
successfulResponseHandler: createJsonResponseHandler(
edgedbRagResponseSchema,
),
failedResponseHandler: gelFailedResponseHandler,
successfulResponseHandler:
createJsonResponseHandler(gelRagResponseSchema),
abortSignal: options.abortSignal,
fetch: this.config.fetch,
});
Expand Down Expand Up @@ -273,9 +272,9 @@ export class EdgeDBChatLanguageModel implements LanguageModelV1 {
query: this.buildQuery(messages),
stream: true,
},
failedResponseHandler: edgedbFailedResponseHandler,
failedResponseHandler: gelFailedResponseHandler,
successfulResponseHandler:
createEventSourceResponseHandler(edgedbRagChunkSchema),
createEventSourceResponseHandler(gelRagChunkSchema),
abortSignal: options.abortSignal,
fetch: this.config.fetch,
});
Expand Down Expand Up @@ -304,7 +303,7 @@ export class EdgeDBChatLanguageModel implements LanguageModelV1 {
return {
stream: response.pipeThrough(
new TransformStream<
ParseResult<z.infer<typeof edgedbRagChunkSchema>>,
ParseResult<z.infer<typeof gelRagChunkSchema>>,
LanguageModelV1StreamPart
>({
transform(chunk, controller) {
Expand Down Expand Up @@ -461,7 +460,7 @@ export class EdgeDBChatLanguageModel implements LanguageModelV1 {
}
}

const edgedbRagResponseSchema = z.object({
const gelRagResponseSchema = z.object({
id: z.string().nullish(),
model: z.string().nullish(),
created: z.number().nullish(),
Expand Down Expand Up @@ -490,7 +489,7 @@ const edgedbRagResponseSchema = z.object({
.nullish(),
});

const edgedbRagChunkSchema = z.discriminatedUnion("type", [
const gelRagChunkSchema = z.discriminatedUnion("type", [
z.object({
type: z.literal("message_start"),
message: z.object({
Expand Down
Loading
Loading