Skip to content

Commit eefdcdf

Browse files
authored
Merge pull request #86 from Azure-Samples/ai-serivice-upgrade
upgrade sk,others to latest version
2 parents 5e8069f + 8fa1e1c commit eefdcdf

File tree

5 files changed

+80
-71
lines changed

5 files changed

+80
-71
lines changed

ai-service.yaml

+1-1
Original file line numberDiff line numberDiff line change
@@ -36,7 +36,7 @@ spec:
3636
memory: 50Mi
3737
limits:
3838
cpu: 30m
39-
memory: 65Mi
39+
memory: 85Mi
4040
startupProbe:
4141
httpGet:
4242
path: /health

charts/aks-store-demo/templates/ai-service.yaml

+1-1
Original file line numberDiff line numberDiff line change
@@ -70,7 +70,7 @@ spec:
7070
memory: 50Mi
7171
limits:
7272
cpu: 30m
73-
memory: 65Mi
73+
memory: 85Mi
7474
startupProbe:
7575
httpGet:
7676
path: /health

src/ai-service/requirements.txt

+3-3
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,9 @@
1-
fastapi==0.95.2
1+
fastapi==0.105.0
22
uvicorn==0.22.0
3-
pydantic==1.10.8
3+
pydantic==2.5.0
44
pytest==7.3.1
55
httpx
66
pyyaml
7-
semantic-kernel==0.3.1.dev0
7+
semantic-kernel==0.4.2.dev0
88
azure.identity==1.14.0
99
requests==2.31.0

src/ai-service/routers/LLM.py

+70
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,70 @@
1+
from azure.identity import DefaultAzureCredential
import semantic_kernel as sk
from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion, OpenAIChatCompletion
from dotenv import load_dotenv
import os


def get_llm():
    """Initialize the LLM backend used by the app.

    Configuration is read from environment variables:
      - USE_LOCAL_LLM: "true" to use a local LLM endpoint (no semantic kernel).
      - USE_AZURE_OPENAI: "true" to use Azure OpenAI; otherwise public OpenAI.
      - AI_ENDPOINT / AZURE_OPENAI_ENDPOINT: endpoint URL, required when either
        of the two flags above is true.
      - OPENAI_API_KEY / USE_AZURE_AD / OPENAI_ORG_ID /
        AZURE_OPENAI_DEPLOYMENT_NAME: credentials and deployment selection.

    Returns:
        tuple: ``(kernel, useLocalLLM, endpoint)`` where ``kernel`` is a
        configured ``semantic_kernel.Kernel`` (``None`` when using a local
        LLM), ``useLocalLLM`` is a bool, and ``endpoint`` is the endpoint
        URL string or ``None``.

    Raises:
        Exception: when both backend flags are set, or required
            configuration is missing.
    """
    # Flag parsing: only the literal string "true" (any case) enables a flag.
    useLocalLLM: bool = os.environ.get("USE_LOCAL_LLM", "").lower() == "true"
    useAzureOpenAI: bool = os.environ.get("USE_AZURE_OPENAI", "").lower() == "true"
    kernel = None
    # BUGFIX: endpoint was previously left unbound when neither flag was set,
    # which made the final `return` raise NameError. Default it to None.
    endpoint = None

    # The two backends are mutually exclusive.
    if useLocalLLM and useAzureOpenAI:
        raise Exception("USE_LOCAL_LLM and USE_AZURE_OPENAI environment variables cannot both be set to true")

    # Either backend needs an endpoint URL.
    if useLocalLLM or useAzureOpenAI:
        endpoint = os.environ.get("AI_ENDPOINT") or os.environ.get("AZURE_OPENAI_ENDPOINT")
        if not endpoint:
            raise Exception("AI_ENDPOINT or AZURE_OPENAI_ENDPOINT environment variable must be set when USE_LOCAL_LLM or USE_AZURE_OPENAI is set to true")

    if useLocalLLM:
        print("Using Local LLM")
    else:
        print("Using OpenAI and setting up Semantic Kernel")
        # Load environment variables from a local .env file, if present.
        load_dotenv()

        # Initialize the semantic kernel once (the original created it twice).
        kernel: sk.Kernel = sk.Kernel()

        # Azure OpenAI deployment/API key, or OpenAI org id, from environment.
        api_key = os.environ.get("OPENAI_API_KEY")
        useAzureAD = os.environ.get("USE_AZURE_AD")

        # An API key is required unless Azure AD auth is requested.
        if not api_key and not useAzureAD:
            raise Exception("OPENAI_API_KEY environment variable must be set")

        if not useAzureOpenAI:
            org_id = os.environ.get("OPENAI_ORG_ID")
            if not org_id:
                raise Exception("OPENAI_ORG_ID environment variable must be set when USE_AZURE_OPENAI is set to False")
            # Register the public OpenAI chat completion service.
            kernel.add_chat_service("dv", OpenAIChatCompletion("gpt-3.5-turbo", api_key, org_id))
        else:
            deployment = os.environ.get("AZURE_OPENAI_DEPLOYMENT_NAME")
            # Register the Azure OpenAI chat completion service.
            if useAzureAD is not None and useAzureAD.lower() == "true":
                print("Authenticating to Azure OpenAI with Azure AD Workload Identity")
                # NOTE(review): the AAD token is fetched once and never
                # refreshed; it expires (typically ~1h) — consider refreshing
                # per request or passing a credential instead. TODO confirm.
                credential = DefaultAzureCredential()
                access_token = credential.get_token("https://cognitiveservices.azure.com/.default")
                kernel.add_chat_service("dv", AzureChatCompletion(deployment_name=deployment, endpoint=endpoint, ad_token=access_token.token))
            else:
                print("Authenticating to Azure OpenAI with OpenAI API key")
                kernel.add_chat_service("dv", AzureChatCompletion(deployment_name=deployment, endpoint=endpoint, api_key=api_key))

    return kernel, useLocalLLM, endpoint

src/ai-service/routers/description_generator.py

+5-66
Original file line numberDiff line numberDiff line change
@@ -1,74 +1,13 @@
1-
from azure.identity import DefaultAzureCredential
1+
from typing import Any, List, Dict
22
from fastapi import APIRouter, Request, status
33
from fastapi.responses import Response, JSONResponse
4-
import semantic_kernel as sk
5-
from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion, OpenAIChatCompletion
6-
from dotenv import load_dotenv
7-
from typing import Any, List, Dict
8-
import os
94
import requests
105
import json
6+
from routers.LLM import get_llm
117

12-
# Set the useLocalLLM and useAzureOpenAI variables based on environment variables
13-
useLocalLLM: bool = False
14-
useAzureOpenAI: bool = False
15-
16-
if os.environ.get("USE_LOCAL_LLM"):
17-
useLocalLLM = os.environ.get("USE_LOCAL_LLM").lower() == "true"
18-
19-
if os.environ.get("USE_AZURE_OPENAI"):
20-
useAzureOpenAI = os.environ.get("USE_AZURE_OPENAI").lower() == "true"
21-
22-
# if useLocalLLM and useAzureOpenAI are both set to true, raise an exception
23-
if useLocalLLM and useAzureOpenAI:
24-
raise Exception("USE_LOCAL_LLM and USE_AZURE_OPENAI environment variables cannot both be set to true")
25-
26-
# if useLocalLLM or useAzureOpenAI are set to true, get the endpoint from the environment variables
27-
if useLocalLLM or useAzureOpenAI:
28-
endpoint: str = os.environ.get("AI_ENDPOINT") or os.environ.get("AZURE_OPENAI_ENDPOINT")
29-
30-
if isinstance(endpoint, str) == False or endpoint == "":
31-
raise Exception("AI_ENDPOINT or AZURE_OPENAI_ENDPOINT environment variable must be set when USE_LOCAL_LLM or USE_AZURE_OPENAI is set to true")
32-
33-
# if not using local LLM, set up the semantic kernel
34-
if useLocalLLM:
35-
print("Using Local LLM")
36-
else:
37-
print("Using OpenAI and setting up Semantic Kernel")
38-
# Load environment variables from .env file
39-
load_dotenv()
40-
41-
# Initialize the semantic kernel
42-
kernel: sk.Kernel = sk.Kernel()
43-
44-
kernel = sk.Kernel()
45-
46-
# Get the Azure OpenAI deployment name, API key, and endpoint or OpenAI org id from environment variables
47-
api_key: str = os.environ.get("OPENAI_API_KEY")
48-
useAzureAD: str = os.environ.get("USE_AZURE_AD")
49-
50-
if (isinstance(api_key, str) == False or api_key == "") and (isinstance(useAzureAD, str) == False or useAzureAD == ""):
51-
raise Exception("OPENAI_API_KEY environment variable must be set")
52-
53-
if not useAzureOpenAI:
54-
org_id = os.environ.get("OPENAI_ORG_ID")
55-
if isinstance(org_id, str) == False or org_id == "":
56-
raise Exception("OPENAI_ORG_ID environment variable must be set when USE_AZURE_OPENAI is set to False")
57-
# Add the OpenAI text completion service to the kernel
58-
kernel.add_chat_service("dv", OpenAIChatCompletion("gpt-3.5-turbo", api_key, org_id))
59-
60-
else:
61-
deployment: str = os.environ.get("AZURE_OPENAI_DEPLOYMENT_NAME")
62-
# Add the Azure OpenAI text completion service to the kernel
63-
if isinstance(useAzureAD, str) == True and useAzureAD.lower() == "true":
64-
print("Authenticating to Azure OpenAI with Azure AD Workload Identity")
65-
credential = DefaultAzureCredential()
66-
access_token = credential.get_token("https://cognitiveservices.azure.com/.default")
67-
kernel.add_chat_service("dv", AzureChatCompletion(deployment_name=deployment, endpoint=endpoint, api_key=access_token.token, ad_auth=True))
68-
else:
69-
print("Authenticating to Azure OpenAI with OpenAI API key")
70-
kernel.add_chat_service("dv", AzureChatCompletion(deployment, endpoint, api_key))
71-
8+
# initialize the model that would be used for the app
9+
kernel, useLocalLLM, endpoint = get_llm()
10+
if not useLocalLLM:
7211
# Import semantic skills from the "skills" directory
7312
skills_directory: str = "skills"
7413
productFunctions: dict = kernel.import_semantic_skill_from_directory(skills_directory, "ProductSkill")

0 commit comments

Comments
 (0)