
Commit 1b76896

Use @langchain/community WebLLM model
1 parent f020d56 · commit 1b76896

File tree: 7 files changed, +388 -206 lines


.env.example → .env.local.example

File renamed without changes.

.gitignore (+2 -1)
@@ -26,6 +26,7 @@ yarn-error.log*
 
 # local env files
 .env*.local
+.env
 
 # vercel
 .vercel
@@ -34,4 +35,4 @@ yarn-error.log*
 *.tsbuildinfo
 next-env.d.ts
 
-.yarn
+.yarn

app/lib/chat_models/webllm.ts (-156)
This file was deleted.
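(The 156 deleted lines were the hand-rolled ChatWebLLM wrapper; an equivalent model class now ships in @langchain/community and is imported in app/worker.ts below.)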

app/worker.ts (+7 -7)
@@ -29,7 +29,7 @@ import { LangChainTracer } from "@langchain/core/tracers/tracer_langchain";
 import { Client } from "langsmith";
 
 import { ChatOllama } from "@langchain/community/chat_models/ollama";
-import { ChatWebLLM } from "./lib/chat_models/webllm";
+import { ChatWebLLM } from "@langchain/community/chat_models/webllm";
 
 const embeddings = new HuggingFaceTransformersEmbeddings({
   modelName: "Xenova/all-MiniLM-L6-v2",
@@ -229,15 +229,15 @@ self.addEventListener("message", async (event: { data: any }) => {
   } else {
     const modelProvider = event.data.modelProvider;
     const modelConfig = event.data.modelConfig;
-    let chatModel: BaseChatModel | LanguageModelLike =
-      modelProvider === "ollama"
-        ? new ChatOllama(modelConfig)
-        : new ChatWebLLM(modelConfig);
+    let chatModel: BaseChatModel | LanguageModelLike;
     if (modelProvider === "webllm") {
-      await (chatModel as ChatWebLLM).initialize((event) =>
+      const webllmModel = new ChatWebLLM(modelConfig);
+      await webllmModel.initialize((event) =>
         self.postMessage({ type: "init_progress", data: event }),
       );
-      chatModel = chatModel.bind({ stop: ["\nInstruct:", "Instruct:"] });
+      chatModel = webllmModel.bind({ stop: ["\nInstruct:", "Instruct:"] });
+    } else {
+      chatModel = new ChatOllama(modelConfig);
     }
     try {
       await queryVectorStore(event.data.messages, {
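For context, a minimal standalone sketch of the usage pattern this hunk adopts, assembled from the calls in the diff and the config in components/ChatWindow.tsx below. The prompt string and console logging are illustrative, not part of the commit:

import { ChatWebLLM } from "@langchain/community/chat_models/webllm";

// Note: ChatWebLLM runs the model in-browser, so this needs a browser or
// web worker environment with WebGPU support.
async function main() {
  const model = new ChatWebLLM({
    model: "Phi2-q4f32_1",
    chatOptions: { temperature: 0.1 },
  });

  // Downloads and compiles the model weights; the callback fires with
  // progress events that can drive a loading indicator.
  await model.initialize((progressEvent) => console.log(progressEvent));

  // Same stop sequences as the worker above; presumably because Phi-2's
  // prompt format uses "Instruct:" turns and the model tends to generate
  // spurious follow-on turns unless cut off there.
  const boundModel = model.bind({ stop: ["\nInstruct:", "Instruct:"] });

  const response = await boundModel.invoke("Why is the sky blue?");
  console.log(response);
}

The refactor also removes an awkward pattern from the old code, which constructed one of the two models up front with a ternary and then called initialize through an "as ChatWebLLM" cast; now each provider's model is constructed only on its own branch and no cast is needed.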

components/ChatWindow.tsx (+7 -20)
@@ -6,7 +6,7 @@ import 'react-toastify/dist/ReactToastify.css';
 import { useRef, useState, useEffect } from "react";
 import type { FormEvent } from "react";
 
-import { ChatMessageBubble } from "@/components/ChatMessageBubble";
+import { ChatMessageBubble } from '@/components/ChatMessageBubble';
 import { ChatWindowMessage } from '@/schema/ChatWindowMessage';
 
 export function ChatWindow(props: {
@@ -35,25 +35,12 @@ export function ChatWindow(props: {
       controller.close();
       return;
     }
-    // Config copied from:
-    // https://github.com/mlc-ai/web-llm/blob/eaaff6a7730b6403810bb4fd2bbc4af113c36050/examples/simple-chat/src/gh-config.js
+    // See https://github.com/mlc-ai/web-llm/blob/main/src/config.ts for a list of available models
     const webLLMConfig = {
-      temperature: 0.1,
-      modelRecord: {
-        "model_url": "https://huggingface.co/mlc-ai/phi-2-q4f32_1-MLC/resolve/main/",
-        "local_id": "Phi2-q4f32_1",
-        "model_lib_url": "https://raw.githubusercontent.com/mlc-ai/binary-mlc-llm-libs/main/phi-2/phi-2-q4f32_1-ctx2k-webgpu.wasm",
-        "vram_required_MB": 4032.48,
-        "low_resource_required": false,
+      model: "Phi2-q4f32_1",
+      chatOptions: {
+        temperature: 0.1,
       },
-      // {
-      //   "model_url": "https://huggingface.co/mlc-ai/phi-2-q0f16-MLC/resolve/main/",
-      //   "local_id": "Phi2-q0f16",
-      //   "model_lib_url": "https://raw.githubusercontent.com/mlc-ai/binary-mlc-llm-libs/main/phi-2/phi-2-q0f16-ctx2k-webgpu.wasm",
-      //   "vram_required_MB": 11079.47,
-      //   "low_resource_required": false,
-      //   "required_features": ["shader-f16"],
-      // },
     };
     const ollamaConfig = {
       baseUrl: "http://localhost:11435",
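The config simplification works because the published wrapper resolves the model id against web-llm's built-in model list (the config.ts linked in the new comment), so the hand-copied modelRecord (weight URL, wasm library URL, VRAM requirement) and the commented-out Phi2-q0f16 alternative could be dropped. Only the model name and sampling options need to be supplied.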
@@ -89,15 +76,15 @@ export function ChatWindow(props: {
       initProgressToastId.current = toast(
         "Loading model weights... This may take a while",
         {
-          progress: e.data.data.progress,
+          progress: e.data.data.progress || 0.01,
           theme: "dark"
         }
       );
     } else {
       if (e.data.data.progress === 1) {
         await new Promise((resolve) => setTimeout(resolve, 2000));
       }
-      toast.update(initProgressToastId.current, { progress: e.data.data.progress });
+      toast.update(initProgressToastId.current, { progress: e.data.data.progress || 0.01 });
     }
     break
   case "chunk":

package.json (+5 -2)
@@ -13,8 +13,8 @@
     "node": ">=18"
   },
   "dependencies": {
-    "@langchain/community": "^0.0.40",
-    "@mlc-ai/web-llm": "^0.2.28",
+    "@langchain/community": "^0.0.57",
+    "@mlc-ai/web-llm": "^0.2.35",
     "@types/node": "20.4.5",
     "@types/react": "18.2.17",
     "@types/react-dom": "18.2.7",
@@ -37,5 +37,8 @@
   },
   "devDependencies": {
     "prettier": "3.0.0"
+  },
+  "resolutions": {
+    "@langchain/core": "0.1.63"
   }
 }
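The new "resolutions" field is a Yarn mechanism that forces every package in the dependency tree onto a single @langchain/core version. Pinning it here is presumably to prevent the app and @langchain/community from each resolving their own copy of core, which would break instanceof checks between runnables and messages that cross the package boundary.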
