pip install llmdk
from llmdk import Llmdk, Providers
# You can also set OPENAI_API_KEY
client = Llmdk(
    provider=Providers.OPENAI,
    model_name='gpt-4o-mini',
    # api_key='***',
)
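# As the comment above notes, the key can also come from the environment instead of
# the constructor. A minimal sketch, assuming OPENAI_API_KEY is read at client creation:
import os
os.environ['OPENAI_API_KEY'] = '***'  # placeholder value, normally set outside the script
client = Llmdk(provider=Providers.OPENAI, model_name='gpt-4o-mini')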
# Generate a completion from a plain text prompt
output = client.generate(
    'Who are you?',
    # system='Write in Portuguese.',
)
# Or pass a chat-style messages list instead of a plain prompt
output = client.generate(
    messages=[
        # {'role': 'system', 'content': 'Write in Portuguese.'},
        {'role': 'user', 'content': 'Who are you?'},
    ],
)
# Stream the response chunk by chunk
for chunk in client.stream(
    'Who are you?',
    # system='Write in Portuguese.',
):
    print(chunk, end='', flush=True)
# Streaming also accepts a messages list
for chunk in client.stream([
    # {'role': 'system', 'content': 'Write in Portuguese.'},
    {'role': 'user', 'content': 'Who are you?'},
]):
    print(chunk, end='', flush=True)
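# Streamed chunks can also be accumulated into a single string. A minimal sketch,
# assuming each chunk is plain text (as the print() calls above suggest):
response = ''.join(client.stream('Who are you?'))
print(response)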
from llmdk import Llmdk, Providers
# You can also set ANTHROPIC_API_KEY
client = Llmdk(
    provider=Providers.ANTHROPIC,
    model_name='claude-3-5-sonnet-20240620',
    # api_key='***',
)
from llmdk import Llmdk, Providers
# You can also set GROQ_API_KEY
client = Llmdk(
    provider=Providers.GROQ,
    model_name='llama-3.1-70b-versatile',
    # api_key='***',
)
from llmdk import Llmdk, Providers
# You can also set HF_TOKEN
client = Llmdk(
    provider=Providers.HUGGINGFACE,
    model_name='meta-llama/Meta-Llama-3.1-70B-Instruct',
    # api_key='***',
)
from llmdk import Llmdk, Providers
client = Llmdk(
    provider=Providers.OLLAMA,
    model_name='llama3.2:1b',
    # base_url='http://localhost:11434',
)
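# The generate/stream calls shown earlier work the same way against this client.
# A minimal sketch, assuming a local Ollama server with llama3.2:1b already pulled:
output = client.generate('Who are you?')
print(output)
for chunk in client.stream('Who are you?'):
    print(chunk, end='', flush=True)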