From c9ae2b9aef0589725cabaa38e80b6c3c0e702b8b Mon Sep 17 00:00:00 2001
From: Kyle Kelley
Date: Tue, 27 Feb 2024 08:59:53 -0800
Subject: [PATCH] re-introduce legacy function calling

---
 CHANGELOG.md    |  5 +++++
 chatlab/chat.py | 25 ++++++++++++++++---------
 2 files changed, 21 insertions(+), 9 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index bb9a52e..30b9692 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -5,6 +5,11 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 
+## [2.0.0]
+
+- Support parallel tool calling by default in `Chat`.
+- Legacy support for function calling is available by passing `legacy_function_calling=True` to the `Chat` constructor.
+
 ## [1.3.0]
 
 - Support tool call format from `FunctionRegistry`. Enables parallel function calling (note: not in `Chat` yet). https://github.com/rgbkrk/chatlab/pull/122
diff --git a/chatlab/chat.py b/chatlab/chat.py
index c35ab7b..0d908b8 100644
--- a/chatlab/chat.py
+++ b/chatlab/chat.py
@@ -73,6 +73,7 @@ def __init__(
         chat_functions: Optional[List[Callable]] = None,
         allow_hallucinated_python: bool = False,
         python_hallucination_function: Optional[PythonHallucinationFunction] = None,
+        legacy_function_calling: bool = False,
     ):
         """Initialize a Chat with an optional initial context of messages.
 
@@ -99,6 +100,8 @@ def __init__(
         self.api_key = openai_api_key
         self.base_url = base_url
 
+        self.legacy_function_calling = legacy_function_calling
+
         if initial_context is None:
             initial_context = []  # type: ignore
 
@@ -295,28 +298,32 @@ async def submit(self, *messages: Union[ChatCompletionMessageParam, str], stream
             base_url=self.base_url,
         )
 
+        chat_create_kwargs = {
+            "model": self.model,
+            "messages": full_messages,
+            "temperature": kwargs.get("temperature", 0),
+        }
+
         # Due to the strict response typing based on `Literal` typing on `stream`, we have to process these
         # two cases separately
        if stream:
+            if self.legacy_function_calling:
+                chat_create_kwargs.update(self.function_registry.api_manifest())
+            else:
+                chat_create_kwargs["tools"] = self.function_registry.tools
+
             streaming_response = await client.chat.completions.create(
-                model=self.model,
-                messages=full_messages,
-                tools=self.function_registry.tools,
+                **chat_create_kwargs,
                 stream=True,
-                temperature=kwargs.get("temperature", 0),
             )
 
             self.append(*messages)
 
             finish_reason, function_call_request, tool_arguments = await self.__process_stream(streaming_response)
         else:
-            # TODO: Process tools for non stream
             full_response = await client.chat.completions.create(
-                model=self.model,
-                messages=full_messages,
-                tools=self.function_registry.tools,
+                **chat_create_kwargs,
                 stream=False,
-                temperature=kwargs.get("temperature", 0),
             )
 
             self.append(*messages)
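
Usage sketch (not part of the patch): the example below shows the two modes this
diff introduces, assuming the chatlab 2.0.0 API as patched above. The
`what_time` function is a hypothetical tool invented for illustration; an
OpenAI API key is expected in the environment.

    import asyncio
    from datetime import datetime

    from chatlab import Chat

    def what_time():
        """Report the current wall-clock time."""
        return datetime.now().strftime("%H:%M")

    async def main():
        # Default after this patch: the registry's tools are sent via the
        # OpenAI `tools` parameter, so the model may issue parallel tool calls.
        chat = Chat(chat_functions=[what_time])
        await chat.submit("What time is it?")

        # Passing the new constructor flag routes through
        # `function_registry.api_manifest()` instead, i.e. the older
        # `functions`/`function_call` request shape.
        legacy_chat = Chat(chat_functions=[what_time], legacy_function_calling=True)
        await legacy_chat.submit("What time is it?")

    asyncio.run(main())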