Skip to content

Commit d186fb2

Browse files
committed
Update tests and readme.
1 parent 413bc0b commit d186fb2

File tree

5 files changed

+12
-28
lines changed

5 files changed

+12
-28
lines changed

.github/workflows/workflow.yml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
# TODO

.gitignore

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,7 @@
11
.vscode
22
**/__pycache__
3-
examples
3+
examples
4+
pypi_token.token
5+
build
6+
dist
7+
langchain_bytez.egg-info

README.md

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -116,6 +116,8 @@ results = bytez_chat_model_phi.invoke(messages)
116116

117117
```
118118

119+
To roll your own implementation that better suits your needs, check out the implementation [here](https://github.com/Bytez-com/langchain_bytez/blob/main/langchain_bytez/BytezStdOutCallbackHandler.py)
120+
119121
# Shutdown your cluster
120122

121123
```py

langchain_bytez/test.py

Lines changed: 2 additions & 25 deletions
Original file line numberDiff line numberDiff line change
@@ -5,40 +5,17 @@
55

66
from langchain.schema import HumanMessage, SystemMessage
77
from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler
8-
from langchain.callbacks.stdout import StdOutCallbackHandler
98
from langchain.utils import print_text
109
from langchain_core.messages import AIMessage, BaseMessageChunk
11-
from langchain_core.outputs import LLMResult
1210

1311
from langchain_bytez import BytezLLM
1412
from langchain_bytez import BytezChatModel
13+
from langchain_bytez import BytezStdOutCallbackHandler
1514

1615

1716
# sudo cat /var/log/cloud-init-output.log
1817

1918

20-
class ExtendedStdOutCallbackHandler(StdOutCallbackHandler):
21-
def print(self, *args, color: str = None, **kwargs):
22-
text = " ".join([str(arg) for arg in args])
23-
print_text(f"\n{text}", color or self.color, **kwargs)
24-
25-
def _on_start(self, serialized, prompts, *args, **kwargs):
26-
model_type = kwargs["invocation_params"]["model_type"]
27-
model_name = kwargs["invocation_params"]["model_name"]
28-
29-
self.print(f"Model started: {model_name} ({model_type})", color="blue")
30-
self.print("Prompt:", prompts, color="yellow")
31-
32-
def on_llm_start(self, *args, **kwargs):
33-
self._on_start(*args, **kwargs)
34-
35-
def on_llm_end(self, response: LLMResult, *args, **kwargs):
36-
self.print("Model responded with: ", response, "\n")
37-
38-
def on_chat_model_start(self, *args, **kwargs):
39-
self._on_start(*args, **kwargs)
40-
41-
4219
async def run_async_test():
4320
API_KEY = os.environ.get("API_KEY")
4421

@@ -61,7 +38,7 @@ async def run_async_test():
6138
callbacks = [
6239
#
6340
StreamingStdOutCallbackHandler(),
64-
ExtendedStdOutCallbackHandler(color="green"),
41+
BytezStdOutCallbackHandler(color="green"),
6542
]
6643

6744
bytez_chat_model_phi = BytezChatModel(

setup.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -2,13 +2,13 @@
22

33
setup(
44
name="langchain_bytez", # Name of your package
5-
version="0.0.0",
5+
version="0.0.5",
66
packages=find_packages(),
77
description="Bytez langchain integration",
88
long_description=open("./README.md").read(),
99
long_description_content_type="text/markdown",
1010
author="Bytez",
11-
url="https://github.com/bytez-com/docs",
11+
url="https://github.com/Bytez-com/langchain_bytez",
1212
install_requires=["langchain==0.3.17"],
1313
classifiers=[
1414
"Development Status :: 4 - Beta",

0 commit comments

Comments
 (0)