File tree: 6 files changed, +109 -0 lines changed

components/EmbeddingSelection/MistralAiOptions
GeneralSettings/EmbeddingPreference
OnboardingFlow/Steps/DataHandling
components/EmbeddingSelection/MistralAiOptions (new file)

+export default function MistralAiOptions({ settings }) {
+  return (
+    <div className="w-full flex flex-col gap-y-4">
+      <div className="w-full flex items-center gap-[36px] mt-1.5">
+        <div className="flex flex-col w-60">
+          <label className="text-white text-sm font-semibold block mb-3">
+            API Key
+          </label>
+          <input
+            type="password"
+            name="MistralAiApiKey"
+            className="bg-theme-settings-input-bg text-white placeholder:text-theme-settings-input-placeholder text-sm rounded-lg focus:outline-primary-button active:outline-primary-button outline-none block w-full p-2.5"
+            placeholder="Mistral AI API Key"
+            defaultValue={settings?.MistralApiKey ? "*".repeat(20) : ""}
+            required={true}
+            autoComplete="off"
+            spellCheck={false}
+          />
+        </div>
+        <div className="flex flex-col w-60">
+          <label className="text-white text-sm font-semibold block mb-3">
+            Model Preference
+          </label>
+          <select
+            name="EmbeddingModelPref"
+            required={true}
+            defaultValue={settings?.EmbeddingModelPref}
+            className="bg-theme-settings-input-bg border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
+          >
+            <optgroup label="Available embedding models">
+              {["mistral-embed"].map((model) => {
+                return (
+                  <option key={model} value={model}>
+                    {model}
+                  </option>
+                );
+              })}
+            </optgroup>
+          </select>
+        </div>
+      </div>
+    </div>
+  );
+}
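For context, a minimal sketch of how this options panel might be mounted by a parent settings form. The harness below is hypothetical and not part of the PR; only the settings keys it passes (MistralApiKey, EmbeddingModelPref) come from the component above.

// Hypothetical harness for local experimentation; not part of the diff.
import MistralAiOptions from "@/components/EmbeddingSelection/MistralAiOptions";

export default function ExampleEmbedderSettings() {
  // A truthy MistralApiKey makes the component render the masked placeholder value.
  const settings = { MistralApiKey: true, EmbeddingModelPref: "mistral-embed" };
  return (
    <form onSubmit={(e) => e.preventDefault()}>
      <MistralAiOptions settings={settings} />
    </form>
  );
}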
GeneralSettings/EmbeddingPreference

@@ -13,6 +13,7 @@ import CohereLogo from "@/media/llmprovider/cohere.png";
 import VoyageAiLogo from "@/media/embeddingprovider/voyageai.png";
 import LiteLLMLogo from "@/media/llmprovider/litellm.png";
 import GenericOpenAiLogo from "@/media/llmprovider/generic-openai.png";
+import MistralAiLogo from "@/media/llmprovider/mistral.jpeg";

 import PreLoader from "@/components/Preloader";
 import ChangeWarningModal from "@/components/ChangeWarning";
@@ -33,6 +34,7 @@ import { useModal } from "@/hooks/useModal";
 import ModalWrapper from "@/components/ModalWrapper";
 import CTAButton from "@/components/lib/CTAButton";
 import { useTranslation } from "react-i18next";
+import MistralAiOptions from "@/components/EmbeddingSelection/MistralAiOptions";

 const EMBEDDERS = [
   {
@@ -100,6 +102,13 @@ const EMBEDDERS = [
     options: (settings) => <LiteLLMOptions settings={settings} />,
     description: "Run powerful embedding models from LiteLLM.",
   },
+  {
+    name: "Mistral AI",
+    value: "mistral",
+    logo: MistralAiLogo,
+    options: (settings) => <MistralAiOptions settings={settings} />,
+    description: "Run powerful embedding models from Mistral AI.",
+  },
   {
     name: "Generic OpenAI",
     value: "generic-openai",
OnboardingFlow/Steps/DataHandling

@@ -349,6 +349,13 @@ export const EMBEDDING_ENGINE_PRIVACY = {
     ],
     logo: VoyageAiLogo,
   },
+  mistral: {
+    name: "Mistral AI",
+    description: [
+      "Data sent to Mistral AI's servers is shared according to the terms of service of https://mistral.ai.",
+    ],
+    logo: MistralLogo,
+  },
   litellm: {
     name: "LiteLLM",
     description: [
EmbeddingEngines/mistral (new file)

+class MistralEmbedder {
+  constructor() {
+    if (!process.env.MISTRAL_API_KEY)
+      throw new Error("No Mistral API key was set.");
+
+    const { OpenAI: OpenAIApi } = require("openai");
+    this.openai = new OpenAIApi({
+      baseURL: "https://api.mistral.ai/v1",
+      apiKey: process.env.MISTRAL_API_KEY ?? null,
+    });
+    this.model = process.env.EMBEDDING_MODEL_PREF || "mistral-embed";
+  }
+
+  async embedTextInput(textInput) {
+    try {
+      const response = await this.openai.embeddings.create({
+        model: this.model,
+        input: textInput,
+      });
+      return response?.data[0]?.embedding || [];
+    } catch (error) {
+      console.error("Failed to get embedding from Mistral.", error.message);
+      return [];
+    }
+  }
+
+  async embedChunks(textChunks = []) {
+    try {
+      const response = await this.openai.embeddings.create({
+        model: this.model,
+        input: textChunks,
+      });
+      return response?.data?.map((emb) => emb.embedding) || [];
+    } catch (error) {
+      console.error("Failed to get embeddings from Mistral.", error.message);
+      return new Array(textChunks.length).fill([]);
+    }
+  }
+}
+
+module.exports = {
+  MistralEmbedder,
+};
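A minimal usage sketch of the new embedder, assuming MISTRAL_API_KEY is set in the environment and that the require path below matches where the file lands; both are assumptions, since only the class itself appears in the diff.

// Hypothetical smoke test; the module path is an assumption.
const { MistralEmbedder } = require("./EmbeddingEngines/mistral");

(async () => {
  const embedder = new MistralEmbedder(); // throws if MISTRAL_API_KEY is unset
  const single = await embedder.embedTextInput("Hello from mistral-embed");
  const batch = await embedder.embedChunks(["first chunk", "second chunk"]);
  console.log(single.length, batch.length); // embedding dimension, number of chunk vectors
})();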
@@ -214,6 +214,9 @@ function getEmbeddingEngineSelection() {
     case "litellm":
       const { LiteLLMEmbedder } = require("../EmbeddingEngines/liteLLM");
       return new LiteLLMEmbedder();
+    case "mistral":
+      const { MistralEmbedder } = require("../EmbeddingEngines/mistral");
+      return new MistralEmbedder();
     case "generic-openai":
       const {
         GenericOpenAiEmbedder,
@@ -753,6 +753,7 @@ function supportedEmbeddingModel(input = "") {
     "voyageai",
     "litellm",
     "generic-openai",
+    "mistral",
   ];
   return supported.includes(input)
     ? null