Skip to content

Commit

Permalink
[pre-commit.ci] auto fixes from pre-commit.com hooks
Browse files Browse the repository at this point in the history
for more information, see https://pre-commit.ci
  • Loading branch information
pre-commit-ci[bot] committed Nov 9, 2024
1 parent c3c2d5f commit a836be0
Showing 1 changed file with 8 additions and 9 deletions.
17 changes: 8 additions & 9 deletions comps/cores/mega/gateway.py
Original file line number Diff line number Diff line change
Expand Up @@ -409,19 +409,19 @@ async def handle_request(self, request: Request):
class DocSumGateway(Gateway):
    """Gateway exposing the document-summarization megaservice.

    Registers the DOC_SUMMARY endpoint on the shared Gateway base, accepting
    DocSumChatCompletionRequest payloads and returning ChatCompletionResponse
    objects (OpenAI-style chat-completion schema).
    """

    def __init__(self, megaservice, host="0.0.0.0", port=8888):
        """Initialize the gateway.

        Args:
            megaservice: Orchestrator whose ``schedule`` runs the DocSum
                microservice pipeline (last stage presumed to be an LLM —
                see handle_request in the full file).
            host: Bind address for the HTTP server; defaults to all interfaces.
            port: Bind port; defaults to 8888.
        """
        super().__init__(
            megaservice,
            host,
            port,
            str(MegaServiceEndpoint.DOC_SUMMARY),
            input_datatype=DocSumChatCompletionRequest,
            output_datatype=ChatCompletionResponse,
        )

async def handle_request(self, request: Request):
data = await request.json()

Check warning on line 421 in comps/cores/mega/gateway.py

View check run for this annotation

Codecov / codecov/patch

comps/cores/mega/gateway.py#L421

Added line #L421 was not covered by tests
stream_opt = data.get("stream", True)
chat_request = ChatCompletionRequest.model_validate(data)

Check warning on line 423 in comps/cores/mega/gateway.py

View check run for this annotation

Codecov / codecov/patch

comps/cores/mega/gateway.py#L423

Added line #L423 was not covered by tests

prompt = self._handle_message(chat_request.messages)

Check warning on line 425 in comps/cores/mega/gateway.py

View check run for this annotation

Codecov / codecov/patch

comps/cores/mega/gateway.py#L425

Added line #L425 was not covered by tests
parameters = LLMParams(
max_tokens=chat_request.max_tokens if chat_request.max_tokens else 1024,
Expand All @@ -434,7 +434,7 @@ async def handle_request(self, request: Request):
streaming=stream_opt,
)
result_dict, runtime_graph = await self.megaservice.schedule(
initial_inputs={data['type']: prompt}, llm_parameters=parameters
initial_inputs={data["type"]: prompt}, llm_parameters=parameters
)
for node, response in result_dict.items():
# Here it suppose the last microservice in the megaservice is LLM.
Expand All @@ -458,7 +458,6 @@ async def handle_request(self, request: Request):
return ChatCompletionResponse(model="docsum", choices=choices, usage=usage)



class AudioQnAGateway(Gateway):
def __init__(self, megaservice, host="0.0.0.0", port=8888):
super().__init__(
Expand Down

0 comments on commit a836be0

Please sign in to comment.