Skip to content

Commit

Permalink
Add API key option to LocalAI (#407)
Browse files Browse the repository at this point in the history
* Add API key option to LocalAI

* add api key for model dropdown selector
  • Loading branch information
timothycarambat authored Dec 4, 2023
1 parent 203f596 commit 6fa8b0c
Show file tree
Hide file tree
Showing 10 changed files with 63 additions and 15 deletions.
1 change: 1 addition & 0 deletions docker/.env.example
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@ CACHE_VECTORS="true"
# LOCAL_AI_BASE_PATH='http://host.docker.internal:8080/v1'
# LOCAL_AI_MODEL_PREF='luna-ai-llama2'
# LOCAL_AI_MODEL_TOKEN_LIMIT=4096
# LOCAL_AI_API_KEY="sk-123abc"

###########################################
######## Embedding API SELECTION ##########
Expand Down
45 changes: 36 additions & 9 deletions frontend/src/components/LLMSelection/LocalAiOptions/index.jsx
Original file line number Diff line number Diff line change
Expand Up @@ -6,12 +6,11 @@ import System from "../../../models/system";
export default function LocalAiOptions({ settings, showAlert = false }) {
const [basePathValue, setBasePathValue] = useState(settings?.LocalAiBasePath);
const [basePath, setBasePath] = useState(settings?.LocalAiBasePath);
function updateBasePath() {
setBasePath(basePathValue);
}
const [apiKeyValue, setApiKeyValue] = useState(settings?.LocalAiApiKey);
const [apiKey, setApiKey] = useState(settings?.LocalAiApiKey);

return (
<div className="w-full flex flex-col">
<div className="w-full flex flex-col gap-y-4">
{showAlert && (
<div className="flex flex-col md:flex-row md:items-center gap-x-2 text-white mb-6 bg-blue-800/30 w-fit rounded-lg px-4 py-2">
<div className="gap-x-2 flex items-center">
Expand Down Expand Up @@ -44,10 +43,14 @@ export default function LocalAiOptions({ settings, showAlert = false }) {
autoComplete="off"
spellCheck={false}
onChange={(e) => setBasePathValue(e.target.value)}
onBlur={updateBasePath}
onBlur={() => setBasePath(basePathValue)}
/>
</div>
<LocalAIModelSelection settings={settings} basePath={basePath} />
<LocalAIModelSelection
settings={settings}
basePath={basePath}
apiKey={apiKey}
/>
<div className="flex flex-col w-60">
<label className="text-white text-sm font-semibold block mb-4">
Token context window
Expand All @@ -65,11 +68,35 @@ export default function LocalAiOptions({ settings, showAlert = false }) {
/>
</div>
</div>
<div className="w-full flex items-center gap-4">
<div className="flex flex-col w-60">
<div className="flex flex-col gap-y-1 mb-4">
<label className="text-white text-sm font-semibold block">
Local AI API Key
</label>
<p className="text-xs italic text-white/60">
optional API key to use if running LocalAI with API keys.
</p>
</div>

<input
type="password"
name="LocalAiApiKey"
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
placeholder="sk-mysecretkey"
defaultValue={settings?.LocalAiApiKey ? "*".repeat(20) : ""}
autoComplete="off"
spellCheck={false}
onChange={(e) => setApiKeyValue(e.target.value)}
onBlur={() => setApiKey(apiKeyValue)}
/>
</div>
</div>
</div>
);
}

function LocalAIModelSelection({ settings, basePath = null }) {
function LocalAIModelSelection({ settings, basePath = null, apiKey = null }) {
const [customModels, setCustomModels] = useState([]);
const [loading, setLoading] = useState(true);

Expand All @@ -81,12 +108,12 @@ function LocalAIModelSelection({ settings, basePath = null }) {
return;
}
setLoading(true);
const { models } = await System.customModels("localai", null, basePath);
const { models } = await System.customModels("localai", apiKey, basePath);
setCustomModels(models || []);
setLoading(false);
}
findCustomModels();
}, [basePath]);
}, [basePath, apiKey]);

if (loading || customModels.length == 0) {
return (
Expand Down
1 change: 1 addition & 0 deletions server/.env.example
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@ JWT_SECRET="my-random-string-for-seeding" # Please generate random string at lea
# LOCAL_AI_BASE_PATH='http://localhost:8080/v1'
# LOCAL_AI_MODEL_PREF='luna-ai-llama2'
# LOCAL_AI_MODEL_TOKEN_LIMIT=4096
# LOCAL_AI_API_KEY="sk-123abc"

###########################################
######## Embedding API SELECTION ##########
Expand Down
2 changes: 1 addition & 1 deletion server/endpoints/system.js
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ const {
acceptedFileTypes,
} = require("../utils/files/documentProcessor");
const { purgeDocument } = require("../utils/files/purgeDocument");
const { getVectorDbClass, getLLMProvider } = require("../utils/helpers");
const { getVectorDbClass } = require("../utils/helpers");
const { updateENV, dumpENV } = require("../utils/helpers/updateENV");
const {
reqBody,
Expand Down
1 change: 1 addition & 0 deletions server/models/systemSettings.js
Original file line number Diff line number Diff line change
Expand Up @@ -103,6 +103,7 @@ const SystemSettings = {
LocalAiBasePath: process.env.LOCAL_AI_BASE_PATH,
LocalAiModelPref: process.env.LOCAL_AI_MODEL_PREF,
LocalAiTokenLimit: process.env.LOCAL_AI_MODEL_TOKEN_LIMIT,
LocalAiApiKey: !!process.env.LOCAL_AI_API_KEY,

// For embedding credentials when localai is selected.
OpenAiKey: !!process.env.OPEN_AI_KEY,
Expand Down
5 changes: 5 additions & 0 deletions server/utils/AiProviders/localAi/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,11 @@ class LocalAiLLM {
const { Configuration, OpenAIApi } = require("openai");
const config = new Configuration({
basePath: process.env.LOCAL_AI_BASE_PATH,
...(!!process.env.LOCAL_AI_API_KEY
? {
apiKey: process.env.LOCAL_AI_API_KEY,
}
: {}),
});
this.openai = new OpenAIApi(config);
this.model = process.env.LOCAL_AI_MODEL_PREF;
Expand Down
3 changes: 2 additions & 1 deletion server/utils/helpers/customModels.js
Original file line number Diff line number Diff line change
Expand Up @@ -35,10 +35,11 @@ async function openAiModels(apiKey = null) {
return { models, error: null };
}

async function localAIModels(basePath = null) {
async function localAIModels(basePath = null, apiKey = null) {
const { Configuration, OpenAIApi } = require("openai");
const config = new Configuration({
basePath,
...(!!apiKey ? { apiKey } : {}),
});
const openai = new OpenAIApi(config);
const models = await openai
Expand Down
4 changes: 4 additions & 0 deletions server/utils/helpers/updateENV.js
Original file line number Diff line number Diff line change
Expand Up @@ -67,6 +67,10 @@ const KEY_MAPPING = {
envKey: "LOCAL_AI_MODEL_TOKEN_LIMIT",
checks: [nonZero],
},
LocalAiApiKey: {
envKey: "LOCAL_AI_API_KEY",
checks: [],
},

EmbeddingEngine: {
envKey: "EMBEDDING_ENGINE",
Expand Down
8 changes: 8 additions & 0 deletions server/utils/http/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -52,11 +52,19 @@ function multiUserMode(response) {
return response?.locals?.multiUserMode;
}

/**
 * Build an HTTP auth header object from a configurable header name and key.
 * Used so providers (e.g. Chroma) can be configured with either a bearer
 * `Authorization` header or a custom header such as `X-Api-Key`.
 * @param {string|null} headerValue - Name of the header to set (e.g. "Authorization", "X-Api-Key").
 * @param {string|null} apiKey - The credential to place in that header.
 * @returns {Object} `{}` when either input is missing/empty; otherwise a
 *   single-entry header object. "Authorization" values are prefixed with "Bearer ".
 */
function parseAuthHeader(headerValue = null, apiKey = null) {
  // Falsy guard (not just === null): an empty header name or empty key can
  // never form a valid header, so treat "" the same as absent config.
  if (!headerValue || !apiKey) return {};
  if (headerValue === "Authorization")
    return { Authorization: `Bearer ${apiKey}` };
  return { [headerValue]: apiKey };
}

// Shared HTTP/request helpers for the server (request-body parsing, JWT
// encode/decode, session/user resolution, and auth-header construction).
module.exports = {
reqBody,
multiUserMode,
queryParams,
makeJWT,
decodeJWT,
userFromSession,
parseAuthHeader,
};
8 changes: 4 additions & 4 deletions server/utils/vectorDbProviders/chroma/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -15,10 +15,10 @@ const Chroma = {
...(!!process.env.CHROMA_API_HEADER && !!process.env.CHROMA_API_KEY
? {
fetchOptions: {
headers: {
[process.env.CHROMA_API_HEADER || "X-Api-Key"]:
process.env.CHROMA_API_KEY,
},
headers: parseAuthHeader(
process.env.CHROMA_API_HEADER || "X-Api-Key",
process.env.CHROMA_API_KEY
),
},
}
: {}),
Expand Down

0 comments on commit 6fa8b0c

Please sign in to comment.