Skip to content

Commit d341030

Browse files
committed
update comment on stream handling
1 parent 171ed8e commit d341030

File tree

1 file changed

+1
-6
lines changed

1 file changed

+1
-6
lines changed

chatlab/conversation.py

+1-6
Original file line numberDiff line numberDiff line change
@@ -231,14 +231,12 @@ async def submit(self, *messages: Union[ChatCompletionMessageParam, str], stream
231231

232232
manifest = self.function_registry.api_manifest()
233233

234+
# Due to the strict response typing based on `Literal` typing on `stream`, we have to process these two cases separately
234235
if stream:
235236
streaming_response = await client.chat.completions.create(
236237
model=self.model,
237238
messages=full_messages,
238239
**manifest,
239-
# Due to this openai beta migration, we're going to assume
240-
# only streaming and drop the non-streaming case for now until
241-
# types are working right.
242240
stream=True,
243241
temperature=kwargs.get("temperature", 0),
244242
)
@@ -249,9 +247,6 @@ async def submit(self, *messages: Union[ChatCompletionMessageParam, str], stream
249247
model=self.model,
250248
messages=full_messages,
251249
**manifest,
252-
# Due to this openai beta migration, we're going to assume
253-
# only streaming and drop the non-streaming case for now until
254-
# types are working right.
255250
stream=False,
256251
temperature=kwargs.get("temperature", 0),
257252
)

0 commit comments

Comments (0)