Commit 462b51d (1 parent: 4c85f0b), showing 12 changed files with 706 additions and 59 deletions.
## Agent Chains

### Agent Chain Element (AgentChainElement)
A single element of an agent chain.

::: llama_cpp_agent.chain.AgentChainElement

### Sequential Chain (AgentChain)
A chain whose elements are invoked sequentially, each able to build on the output of the previous element.

::: llama_cpp_agent.chain.AgentChain

### Map Chain (MapChain)
Maps over a list of items and then combines the results using another chain.

::: llama_cpp_agent.chain.MapChain
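Below is a minimal sketch of how a two-step sequential chain might be wired up. It assumes an already configured `LlamaCppAgent` instance, that `AgentChainElement` takes an output identifier, a system prompt, and a prompt template whose placeholders are filled from earlier outputs, and that `AgentChain` exposes a `run_chain` method; treat these parameter and method names as illustrative rather than confirmed API.

```python
from llama_cpp_agent.chain import AgentChainElement, AgentChain

# Assumed: `agent` is an already configured LlamaCppAgent instance.
# The first element answers the question and stores its output as "out_0".
answer_element = AgentChainElement(
    output_identifier="out_0",
    system_prompt="You are an assistant that answers questions concisely.",
    prompt="Answer the following question: {user_input}",
)

# The second element consumes "out_0" and produces a one-sentence summary.
summary_element = AgentChainElement(
    output_identifier="out_1",
    system_prompt="You are a summarization assistant.",
    prompt="Summarize the following text in one sentence:\n{out_0}",
)

chain = AgentChain(agent, [answer_element, summary_element])
chain.run_chain(additional_fields={"user_input": "What is an agent chain?"})
```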
### llama-index tools example
```python
# Example that uses the FunctionCallingAgent class with llama-index tools and query engines.

# Import the necessary classes of llama-cpp-agent.
from llama_cpp_agent.function_calling import LlamaCppFunctionTool
from llama_cpp_agent.function_calling_agent import FunctionCallingAgent
from llama_cpp_agent.messages_formatter import MessagesFormatterType
from llama_cpp_agent.providers.llama_cpp_endpoint_provider import LlamaCppEndpointSettings, LlamaCppGenerationSettings

# The query engine setup below is taken from the llama-index example on asking questions over documents:
# https://docs.llamaindex.ai/en/stable/examples/agent/react_agent_with_query_engine/

# Import the necessary classes of llama-index.
from llama_index.core import (
    SimpleDirectoryReader,
    VectorStoreIndex,
    StorageContext,
    load_index_from_storage,
    Settings,
)
from llama_index.core.tools import QueryEngineTool, ToolMetadata

# Set the default llm of llama-index to None; llama-index raises an error otherwise.
Settings.llm = None

# Load the data.
lyft_docs = SimpleDirectoryReader(
    input_files=["./data/10k/lyft_2021.pdf"]
).load_data()
uber_docs = SimpleDirectoryReader(
    input_files=["./data/10k/uber_2021.pdf"]
).load_data()

# Build the indices.
lyft_index = VectorStoreIndex.from_documents(lyft_docs, embed_model="local")
uber_index = VectorStoreIndex.from_documents(uber_docs, embed_model="local")

# Create the query engines for Lyft and Uber.
lyft_engine = lyft_index.as_query_engine(similarity_top_k=3)
uber_engine = uber_index.as_query_engine(similarity_top_k=3)

# Create a list of query engine tools.
query_engine_tools = [
    QueryEngineTool(
        query_engine=lyft_engine,
        metadata=ToolMetadata(
            name="lyft_10k",
            description=(
                "Provides information about Lyft financials for year 2021. "
                "Use a detailed plain text question as input to the tool."
            ),
        ),
    ),
    QueryEngineTool(
        query_engine=uber_engine,
        metadata=ToolMetadata(
            name="uber_10k",
            description=(
                "Provides information about Uber financials for year 2021. "
                "Use a detailed plain text question as input to the tool."
            ),
        ),
    ),
]

# Initialize the llama-cpp-agent LLM endpoint and the generation parameters.
generation_settings = LlamaCppGenerationSettings(temperature=0.45, top_p=1.0, top_k=0, stream=True)
main_model = LlamaCppEndpointSettings("http://localhost:8080/completion")

# Create LlamaCppFunctionTool instances out of the llama-index query engine tools
# by passing them to the from_llama_index_tool function of the LlamaCppFunctionTool class.
lyft_query_engine_tool = LlamaCppFunctionTool.from_llama_index_tool(query_engine_tools[0])
uber_query_engine_tool = LlamaCppFunctionTool.from_llama_index_tool(query_engine_tools[1])

function_call_agent = FunctionCallingAgent(
    main_model,
    llama_generation_settings=generation_settings,
    # Pass the LlamaCppFunctionTool instances as a list to the agent.
    llama_cpp_function_tools=[lyft_query_engine_tool, uber_query_engine_tool],
    allow_parallel_function_calling=True,
    messages_formatter_type=MessagesFormatterType.CHATML,
    debug_output=True)

user_input = "What was Lyft's revenue growth in 2021?"
function_call_agent.generate_response(user_input)
```
Example Output:
```text
[
  {
    "thoughts_and_reasoning": "The user has asked for Lyft's revenue growth in the year 2021. Based on the context information provided by the 'lyft_10k' function call, we can determine that Lyft's revenue increased by 36% in 2021 compared to the previous year.",
    "function": "send_message",
    "parameters": {
      "content": "Lyft's revenue grew by 36% in the year 2021."
    }
  }
]
Lyft's revenue grew by 36% in the year 2021.
```
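Because the agent was created with `allow_parallel_function_calling=True`, a single turn can emit several tool calls at once. The follow-up below is a hypothetical usage sketch that reuses the agent defined above; the question is illustrative, and the exact output depends on the model.

```python
# A question that touches both tools, so the model may call
# lyft_10k and uber_10k in the same structured response.
user_input = "Compare the revenue growth of Lyft and Uber in 2021."
function_call_agent.generate_response(user_input)
```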
### Manual Function Calling with Python Function Example
This example shows how to do function calling with actual Python functions.

```python
from llama_cpp import Llama
from typing import Union
import math

from llama_cpp_agent.llm_agent import LlamaCppAgent
from llama_cpp_agent.messages_formatter import MessagesFormatterType
from llama_cpp_agent.function_calling import LlamaCppFunctionTool


def calculate_a_to_the_power_b(a: Union[int, float], b: Union[int, float]):
    """
    Calculates 'a' to the power 'b' and returns the result
    """
    return f"Result: {math.pow(a, b)}"


# Wrap the plain Python function as a tool; its type hints and docstring
# are used to generate the function documentation for the model.
function_tools = [LlamaCppFunctionTool(calculate_a_to_the_power_b)]

function_tool_registry = LlamaCppAgent.get_function_tool_registry(function_tools)

main_model = Llama(
    "../../gguf-models/openhermes-2.5-mistral-7b-16k.Q8_0.gguf",
    n_gpu_layers=49,
    offload_kqv=True,
    f16_kv=True,
    use_mlock=False,
    embedding=False,
    n_threads=8,
    n_batch=1024,
    n_ctx=8192,
    last_n_tokens_size=1024,
    verbose=True,
    seed=42,
)

llama_cpp_agent = LlamaCppAgent(
    main_model,
    debug_output=True,
    system_prompt="You are an advanced AI, tasked to assist the user by calling functions in JSON format. The following are the available functions and their parameters and types:\n\n"
    + function_tool_registry.get_documentation(),
    predefined_messages_formatter_type=MessagesFormatterType.CHATML,
)

user_input = "Calculate 5 to the power of 42"

print(llama_cpp_agent.get_chat_response(user_input, temperature=0.45, function_tool_registry=function_tool_registry))
```
Example output:
```text
{ "function": "calculate-a-to-the-power-b","function_parameters": { "a": 5 , "b": 42 }}
Result: 2.2737367544323207e+29
```
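The same pattern extends to multiple tools. The sketch below registers a hypothetical second function alongside the first; `calculate_square_root` is illustrative and not part of the original example, but it reuses only the calls shown above.

```python
# Hypothetical second tool: any type-hinted, docstring-documented
# Python function can be wrapped the same way.
def calculate_square_root(a: Union[int, float]):
    """
    Calculates the square root of 'a' and returns the result
    """
    return f"Result: {math.sqrt(a)}"


function_tools = [
    LlamaCppFunctionTool(calculate_a_to_the_power_b),
    LlamaCppFunctionTool(calculate_square_root),
]
function_tool_registry = LlamaCppAgent.get_function_tool_registry(function_tools)
```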