From 2bba083b5aa1a0732e6a183f6ce3f76d8bad82c2 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?P=CE=94BL=C3=98=20=E1=84=83=CE=9E?=
Date: Sun, 5 May 2024 23:00:22 -0500
Subject: [PATCH 1/5] enhance: one ci/publish workflow only

---
 .github/workflows/python-ci.yml      | 33 +++++++++++++++++++++--
 .github/workflows/python-publish.yml | 39 ----------------------------
 2 files changed, 31 insertions(+), 41 deletions(-)
 delete mode 100644 .github/workflows/python-publish.yml

diff --git a/.github/workflows/python-ci.yml b/.github/workflows/python-ci.yml
index 5d55dce..a37528e 100644
--- a/.github/workflows/python-ci.yml
+++ b/.github/workflows/python-ci.yml
@@ -1,6 +1,11 @@
 name: Python-CI
 
-on: [push]
+on:
+  push:
+    branches:
+      - master
+  release:
+    types: [published]
 
 jobs:
   build:
@@ -14,9 +19,33 @@ jobs:
         uses: actions/setup-python@v5
         with:
           python-version: ${{ matrix.python-version }}
-      - name: Install dependencies and Build package
+      - name: Install dependencies
         run: |
           python -m pip install --upgrade pip
           pip install build
       - name: Build package
         run: python -m build
+      - name: Upload artifact
+        uses: actions/upload-artifact@v4
+        with:
+          name: dist
+          path: dist
+  publish:
+    needs: build
+    runs-on: ubuntu-latest
+    if: github.event_name == 'release' && github.event.action == 'published'
+    steps:
+      - uses: actions/checkout@v4
+      - name: Set up Python
+        uses: actions/setup-python@v5
+        with:
+          python-version: '3.11'
+      - name: Download artifact
+        uses: actions/download-artifact@v4
+        with:
+          name: dist
+      - name: Publish package
+        uses: pypa/gh-action-pypi-publish@3fbcf7ccf443305955ce16db9de8401f7dc1c7dd
+        with:
+          user: __token__
+          password: ${{ secrets.PYPI_API_TOKEN }}
diff --git a/.github/workflows/python-publish.yml b/.github/workflows/python-publish.yml
deleted file mode 100644
index fbc8ebf..0000000
--- a/.github/workflows/python-publish.yml
+++ /dev/null
@@ -1,39 +0,0 @@
-# This workflow will upload a Python Package using Twine when a release is created
-# For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python#publishing-to-package-registries
-
-# This workflow uses actions that are not certified by GitHub.
-# They are provided by a third-party and are governed by
-# separate terms of service, privacy policy, and support
-# documentation.
- -name: Upload Python Package - -on: - release: - types: [published] - -permissions: - contents: read - -jobs: - deploy: - - runs-on: ubuntu-latest - - steps: - - uses: actions/checkout@v4 - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: '3.11' - - name: Install dependencies - run: | - python -m pip install --upgrade pip - pip install build - - name: Build package - run: python -m build - - name: Publish package - uses: pypa/gh-action-pypi-publish@3fbcf7ccf443305955ce16db9de8401f7dc1c7dd - with: - user: __token__ - password: ${{ secrets.PYPI_API_TOKEN }} From 6d44012e135c176f2d0664475d7bd52f5f3b0ff9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?P=CE=94BL=C3=98=20=E1=84=83=CE=9E?= Date: Mon, 6 May 2024 22:36:38 -0500 Subject: [PATCH 2/5] chat template --- src/llama_cpp_agent/chat_template.py | 262 ++++++++++++++++++ .../chat_template/alpaca.jinja | 24 ++ .../chat_template/amberchat.jinja | 26 ++ .../chat_template/chatml.jinja | 18 ++ .../chat_template/chatqa.jinja | 36 +++ .../chat_template/falcon.jinja | 22 ++ src/llama_cpp_agent/chat_template/gemma.jinja | 31 +++ .../chat_template/llama-2.jinja | 25 ++ .../chat_template/llama-3.jinja | 24 ++ .../chat_template/mistral.jinja | 26 ++ .../chat_template/openchat.jinja | 20 ++ src/llama_cpp_agent/chat_template/phi-3.jinja | 17 ++ src/llama_cpp_agent/chat_template/saiga.jinja | 23 ++ src/llama_cpp_agent/chat_template/solar.jinja | 18 ++ .../chat_template/vicuna.jinja | 24 ++ .../chat_template/zephyr.jinja | 17 ++ 16 files changed, 613 insertions(+) create mode 100644 src/llama_cpp_agent/chat_template.py create mode 100644 src/llama_cpp_agent/chat_template/alpaca.jinja create mode 100644 src/llama_cpp_agent/chat_template/amberchat.jinja create mode 100644 src/llama_cpp_agent/chat_template/chatml.jinja create mode 100644 src/llama_cpp_agent/chat_template/chatqa.jinja create mode 100644 src/llama_cpp_agent/chat_template/falcon.jinja create mode 100644 src/llama_cpp_agent/chat_template/gemma.jinja create mode 100644 src/llama_cpp_agent/chat_template/llama-2.jinja create mode 100644 src/llama_cpp_agent/chat_template/llama-3.jinja create mode 100644 src/llama_cpp_agent/chat_template/mistral.jinja create mode 100644 src/llama_cpp_agent/chat_template/openchat.jinja create mode 100644 src/llama_cpp_agent/chat_template/phi-3.jinja create mode 100644 src/llama_cpp_agent/chat_template/saiga.jinja create mode 100644 src/llama_cpp_agent/chat_template/solar.jinja create mode 100644 src/llama_cpp_agent/chat_template/vicuna.jinja create mode 100644 src/llama_cpp_agent/chat_template/zephyr.jinja diff --git a/src/llama_cpp_agent/chat_template.py b/src/llama_cpp_agent/chat_template.py new file mode 100644 index 0000000..5dca324 --- /dev/null +++ b/src/llama_cpp_agent/chat_template.py @@ -0,0 +1,262 @@ +""" chat template function handler""" + +from jinja2 import Template + +def raise_exception(message): + """Function raise exception.""" + raise ValueError(message) + +def alpaca_template(messages, add_generation_prompt=True) -> str: + """Function alpaca template.""" + with open('chat_template/alpaca.jinja', 'r', encoding="utf-8") as file: + chat_template = file.read() + + # Create a Jinja template object + template = Template(chat_template) + + # Render the template with the messages and add_generation_prompt + output = template.render( + messages=messages, + add_generation_prompt=add_generation_prompt, + raise_exception=raise_exception + ) + + return output.strip() + +def amberchat_template(messages, add_generation_prompt=True) -> str: + 
"""Function amberchat template.""" + with open('chat_template/amberchat.jinja', 'r', encoding="utf-8") as file: + chat_template = file.read() + + # Create a Jinja template object + template = Template(chat_template) + + # Render the template with the messages and add_generation_prompt + output = template.render( + messages=messages, + add_generation_prompt=add_generation_prompt, + raise_exception=raise_exception + ) + + return output.strip() + +def chatml_template(messages, add_generation_prompt=True) -> str: + """Function chatml template.""" + with open('chat_template/chatml.jinja', 'r', encoding="utf-8") as file: + chat_template = file.read() + + # Create a Jinja template object + template = Template(chat_template) + + # Render the template with the messages and add_generation_prompt + output = template.render( + messages=messages, + add_generation_prompt=add_generation_prompt, + raise_exception=raise_exception + ) + + return output.strip() + +def chatqa_template(messages, add_generation_prompt=True) -> str: + """Function chatqa template.""" + with open('chat_template/chatqa.jinja', 'r', encoding="utf-8") as file: + chat_template = file.read() + + # Create a Jinja template object + template = Template(chat_template) + + # Render the template with the messages and add_generation_prompt + output = template.render( + messages=messages, + add_generation_prompt=add_generation_prompt, + raise_exception=raise_exception + ) + + return output.strip() + +def falcon_template(messages, add_generation_prompt=True) -> str: + """Function falcon template.""" + with open('chat_template/falcon.jinja', 'r', encoding="utf-8") as file: + chat_template = file.read() + + # Create a Jinja template object + template = Template(chat_template) + + # Render the template with the messages and add_generation_prompt + output = template.render( + messages=messages, + add_generation_prompt=add_generation_prompt, + raise_exception=raise_exception + ) + + return output.strip() + +def gemma_template(messages, add_generation_prompt=True) -> str: + """Function gemma template.""" + with open('chat_template/gemma.jinja', 'r', encoding="utf-8") as file: + chat_template = file.read() + + # Create a Jinja template object + template = Template(chat_template) + + # Render the template with the messages and add_generation_prompt + output = template.render( + messages=messages, + add_generation_prompt=add_generation_prompt, + raise_exception=raise_exception + ) + + return output.strip() + +def llama_2_template(messages, add_generation_prompt=True) -> str: + """Function llama-2 template.""" + with open('chat_template/llama-2.jinja', 'r', encoding="utf-8") as file: + chat_template = file.read() + + # Create a Jinja template object + template = Template(chat_template) + + # Render the template with the messages and add_generation_prompt + output = template.render( + messages=messages, + add_generation_prompt=add_generation_prompt, + raise_exception=raise_exception + ) + + return output.strip() + +def llama_3_template(messages, add_generation_prompt=True): + """Function llama-3 template.""" + with open('chat_template/llama-3.jinja', 'r', encoding="utf-8") as file: + chat_template = file.read() + + # Create a Jinja template object + template = Template(chat_template) + + # Render the template with the messages and add_generation_prompt + output = template.render( + messages=messages, + add_generation_prompt=add_generation_prompt, + raise_exception=raise_exception + ) + + return output.strip() + +def mistral_template(messages, 
add_generation_prompt=True): + """Function mistral instruct template.""" + with open('chat_template/mistral.jinja', 'r', encoding="utf-8") as file: + chat_template = file.read() + + # Create a Jinja template object + template = Template(chat_template) + + # Render the template with the messages and add_generation_prompt + output = template.render( + messages=messages, + add_generation_prompt=add_generation_prompt, + raise_exception=raise_exception + ) + + return output.strip() + +def openchat_template(messages, add_generation_prompt=True) -> str: + """Function openchat template.""" + with open('chat_template/openchat.jinja', 'r', encoding="utf-8") as file: + chat_template = file.read() + + # Create a Jinja template object + template = Template(chat_template) + + # Render the template with the messages and add_generation_prompt + output = template.render( + messages=messages, + add_generation_prompt=add_generation_prompt, + raise_exception=raise_exception + ) + + return output.strip() + +def phi_3_template(messages, add_generation_prompt=True) -> str: + """Function phi-3 template.""" + with open('chat_template/phi-3.jinja', 'r', encoding="utf-8") as file: + chat_template = file.read() + + # Create a Jinja template object + template = Template(chat_template) + + # Render the template with the messages and add_generation_prompt + output = template.render( + messages=messages, + add_generation_prompt=add_generation_prompt, + raise_exception=raise_exception + ) + + return output.strip() + +def saiga_template(messages, add_generation_prompt=True) -> str: + """Function saiga template.""" + with open('chat_template/saiga.jinja', 'r', encoding="utf-8") as file: + chat_template = file.read() + + # Create a Jinja template object + template = Template(chat_template) + + # Render the template with the messages and add_generation_prompt + output = template.render( + messages=messages, + add_generation_prompt=add_generation_prompt, + raise_exception=raise_exception + ) + + return output.strip() + +def solar_template(messages, add_generation_prompt=True) -> str: + """Function solar template.""" + with open('chat_template/solar.jinja', 'r', encoding="utf-8") as file: + chat_template = file.read() + + # Create a Jinja template object + template = Template(chat_template) + + # Render the template with the messages and add_generation_prompt + output = template.render( + messages=messages, + add_generation_prompt=add_generation_prompt, + raise_exception=raise_exception + ) + + return output.strip() + +def vicuna_template(messages, add_generation_prompt=True) -> str: + """Function vicuna template.""" + with open('chat_template/vicuna.jinja', 'r', encoding="utf-8") as file: + chat_template = file.read() + + # Create a Jinja template object + template = Template(chat_template) + + # Render the template with the messages and add_generation_prompt + output = template.render( + messages=messages, + add_generation_prompt=add_generation_prompt, + raise_exception=raise_exception + ) + + return output.strip() + +def zephyr_template(messages, add_generation_prompt=True) -> str: + """Function zephyr template.""" + with open('chat_template/zephyr.jinja', 'r', encoding="utf-8") as file: + chat_template = file.read() + + # Create a Jinja template object + template = Template(chat_template) + + # Render the template with the messages and add_generation_prompt + output = template.render( + messages=messages, + add_generation_prompt=add_generation_prompt, + raise_exception=raise_exception + ) + + return output.strip() diff 
--git a/src/llama_cpp_agent/chat_template/alpaca.jinja b/src/llama_cpp_agent/chat_template/alpaca.jinja new file mode 100644 index 0000000..def04b3 --- /dev/null +++ b/src/llama_cpp_agent/chat_template/alpaca.jinja @@ -0,0 +1,24 @@ +{% if messages[0]['role'] == 'system' %} + {% set loop_messages = messages[1:] %} + {% set system_message = messages[0]['content'].strip() + '\n\n' %} +{% else %} + {% set loop_messages = messages %} + {% set system_message = '' %} +{% endif %} + +{{ bos_token + system_message }} +{% for message in loop_messages %} + {% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %} + {{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }} + {% endif %} + + {% if message['role'] == 'user' %} + {{ '### Instruction:\n' + message['content'].strip() + '\n\n' }} + {% elif message['role'] == 'assistant' %} + {{ '### Response:\n' + message['content'].strip() + eos_token + '\n\n' }} + {% endif %} + + {% if loop.last and message['role'] == 'user' and add_generation_prompt %} + {{ '### Instruction:\n' }} + {% endif %} +{% endfor %} \ No newline at end of file diff --git a/src/llama_cpp_agent/chat_template/amberchat.jinja b/src/llama_cpp_agent/chat_template/amberchat.jinja new file mode 100644 index 0000000..7b746b6 --- /dev/null +++ b/src/llama_cpp_agent/chat_template/amberchat.jinja @@ -0,0 +1,26 @@ +{% if messages[0]['role'] == 'system' %} + {% set loop_messages = messages[1:] %} + {% set system_message = messages[0]['content'].strip() + '\n\n' %} +{% else %} + {% set loop_messages = messages %} + {% set system_message = '' %} +{% endif %} + +{{ bos_token }} +{% for message in loop_messages %} + {% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %} + {{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }} + {% endif %} + + {% if loop.index0 == 0 %} + {% set content = system_message + message['content'] %} + {% else %} + {% set content = message['content'] %} + {% endif %} + + {% if message['role'] == 'user' %} + {{ '[INST] ' + content.strip() + ' [/INST]' }} + {% elif message['role'] == 'assistant' %} + {{ ' ' + content.strip() + eos_token }} + {% endif %} +{% endfor %} \ No newline at end of file diff --git a/src/llama_cpp_agent/chat_template/chatml.jinja b/src/llama_cpp_agent/chat_template/chatml.jinja new file mode 100644 index 0000000..eaf45a6 --- /dev/null +++ b/src/llama_cpp_agent/chat_template/chatml.jinja @@ -0,0 +1,18 @@ +{% if messages[0]['role'] == 'system' %} + {% set offset = 1 %} +{% else %} + {% set offset = 0 %} +{% endif %} + +{{ bos_token }} +{% for message in messages %} + {% if (message['role'] == 'user') != (loop.index0 % 2 == offset) %} + {{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }} + {% endif %} + + {{ '<|im_start|>' + message['role'] + '\n' + message['content'].strip() + '<|im_end|>\n' }} + + {% if loop.last and message['role'] == 'user' and add_generation_prompt %} + {{ '<|im_start|>assistant\n' }} + {% endif %} +{% endfor %} \ No newline at end of file diff --git a/src/llama_cpp_agent/chat_template/chatqa.jinja b/src/llama_cpp_agent/chat_template/chatqa.jinja new file mode 100644 index 0000000..714e864 --- /dev/null +++ b/src/llama_cpp_agent/chat_template/chatqa.jinja @@ -0,0 +1,36 @@ +{{ bos_token }} +{% if messages[0]['role'] == 'system' %} + {% set loop_messages = messages[1:] %} + {% set system_message = 'System: ' + messages[0]['content'].strip() %} +{% else %} + {% set loop_messages = messages %} + {% set 
system_message = '' %} +{% endif %} + +{% if messages[0]['role'] == 'context' %} + {% set loop_messages = messages[1:] %} + {% set context_message = '\n\n' + messages[0]['content'].strip() %} +{% else %} + {% set loop_messages = messages %} + {% set context_message = '' %} +{% endif %} + +{% for message in loop_messages %} + {% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %} + {{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }} + {% endif %} + + {% if loop.index0 == 0 %} + {{ system_message + context_message }} + {% endif %} + + {% if message['role'] == 'user' %} + {{ '\n\nUser: ' + content.strip() }} + {% elif message['role'] == 'assistant' %} + {{ '\n\nAssistant: ' + content.strip() }} + {% endif %} + + {% if loop.last and message['role'] == 'user' and add_generation_prompt %} + {{ '\n\nAssistant:' }} + {% endif %} +{% endfor %} \ No newline at end of file diff --git a/src/llama_cpp_agent/chat_template/falcon.jinja b/src/llama_cpp_agent/chat_template/falcon.jinja new file mode 100644 index 0000000..70f7cdf --- /dev/null +++ b/src/llama_cpp_agent/chat_template/falcon.jinja @@ -0,0 +1,22 @@ +{% if messages[0]['role'] == 'system' %} + {% set loop_messages = messages[1:] %} + {% set system_message = messages[0]['content'] %} +{% else %} + {% set loop_messages = messages %} + {% set system_message = '' %} +{% endif %} + +{% for message in loop_messages %} + {% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %} + {{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }} + {% endif %} + + {% if loop.index0 == 0 %} + {{ system_message.strip() }} + {% endif %} + {{ '\n\n' + message['role'].title() + ': ' + message['content'].strip().replace('\r\n', '\n').replace('\n\n', '\n') }} + + {% if loop.last and message['role'] == 'user' and add_generation_prompt %} + {{ '\n\nAssistant:' }} + {% endif %} +{% endfor %} \ No newline at end of file diff --git a/src/llama_cpp_agent/chat_template/gemma.jinja b/src/llama_cpp_agent/chat_template/gemma.jinja new file mode 100644 index 0000000..331c9bc --- /dev/null +++ b/src/llama_cpp_agent/chat_template/gemma.jinja @@ -0,0 +1,31 @@ +{% if messages[0]['role'] == 'system' %} + {% set loop_messages = messages[1:] %} + {% set system_message = messages[0]['content'].strip() + '\n\n' %} +{% else %} + {% set loop_messages = messages %} + {% set system_message = '' %} +{% endif %} + +{% for message in loop_messages %} + {% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %} + {{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }} + {% endif %} + + {% if loop.index0 == 0 %} + {% set content = system_message + message['content'] %} + {% else %} + {% set content = message['content'] %} + {% endif %} + + {% if (message['role'] == 'assistant') %} + {% set role = 'model' %} + {% else %} + {% set role = message['role'] %} + {% endif %} + + {{ '' + role + '\n' + content.strip() + '\n' }} + + {% if loop.last and message['role'] == 'user' and add_generation_prompt %} + {{'model\n'}} + {% endif %} +{% endfor %} \ No newline at end of file diff --git a/src/llama_cpp_agent/chat_template/llama-2.jinja b/src/llama_cpp_agent/chat_template/llama-2.jinja new file mode 100644 index 0000000..2dcc5b4 --- /dev/null +++ b/src/llama_cpp_agent/chat_template/llama-2.jinja @@ -0,0 +1,25 @@ +{% if messages[0]['role'] == 'system' %} + {% set loop_messages = messages[1:] %} + {% set system_message = '<>\n' + messages[0]['content'].strip() + '\n<>\n\n' %} +{% 
else %} + {% set loop_messages = messages %} + {% set system_message = '' %} +{% endif %} + +{% for message in loop_messages %} + {% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %} + {{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }} + {% endif %} + + {% if loop.index0 == 0 %} + {% set content = system_message + message['content'] %} + {% else %} + {% set content = message['content'] %} + {% endif %} + + {% if message['role'] == 'user' %} + {{ bos_token + '[INST] ' + content.strip() + ' [/INST]' }} + {% elif message['role'] == 'assistant' %} + {{ ' ' + content.strip() + ' ' + eos_token }} + {% endif %} +{% endfor %} \ No newline at end of file diff --git a/src/llama_cpp_agent/chat_template/llama-3.jinja b/src/llama_cpp_agent/chat_template/llama-3.jinja new file mode 100644 index 0000000..70d294d --- /dev/null +++ b/src/llama_cpp_agent/chat_template/llama-3.jinja @@ -0,0 +1,24 @@ +{{ bos_token }} +{% if messages[0]['role'] == 'system' %} + {% set loop_messages = messages[1:] %} + {% set system_message = '<|start_header_id|>' + 'system' + '<|end_header_id|>\n\n' + messages[0]['content'].strip() + '<|eot_id|>' %} +{% else %} + {% set loop_messages = messages %} + {% set system_message = '' %} +{% endif %} + +{% for message in loop_messages %} + {% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %} + {{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }} + {% endif %} + + {% if loop.index0 == 0 %} + {{ system_message }} + {% endif %} + + {{ '<|start_header_id|>' + message['role'] + '<|end_header_id|>\n\n' + message['content'].strip() + '<|eot_id|>' }} + + {% if loop.last and message['role'] == 'user' and add_generation_prompt %} + {{ '<|start_header_id|>' + 'assistant' + '<|end_header_id|>\n\n' }} + {% endif %} +{% endfor %} \ No newline at end of file diff --git a/src/llama_cpp_agent/chat_template/mistral.jinja b/src/llama_cpp_agent/chat_template/mistral.jinja new file mode 100644 index 0000000..7b746b6 --- /dev/null +++ b/src/llama_cpp_agent/chat_template/mistral.jinja @@ -0,0 +1,26 @@ +{% if messages[0]['role'] == 'system' %} + {% set loop_messages = messages[1:] %} + {% set system_message = messages[0]['content'].strip() + '\n\n' %} +{% else %} + {% set loop_messages = messages %} + {% set system_message = '' %} +{% endif %} + +{{ bos_token }} +{% for message in loop_messages %} + {% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %} + {{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }} + {% endif %} + + {% if loop.index0 == 0 %} + {% set content = system_message + message['content'] %} + {% else %} + {% set content = message['content'] %} + {% endif %} + + {% if message['role'] == 'user' %} + {{ '[INST] ' + content.strip() + ' [/INST]' }} + {% elif message['role'] == 'assistant' %} + {{ ' ' + content.strip() + eos_token }} + {% endif %} +{% endfor %} \ No newline at end of file diff --git a/src/llama_cpp_agent/chat_template/openchat.jinja b/src/llama_cpp_agent/chat_template/openchat.jinja new file mode 100644 index 0000000..5004cd2 --- /dev/null +++ b/src/llama_cpp_agent/chat_template/openchat.jinja @@ -0,0 +1,20 @@ +{% if messages[0]['role'] == 'system' %} + {% set loop_messages = messages[1:] %} + {% set system_message = messages[0]['content'].strip() + '<|end_of_turn|>' %} +{% else %} + {% set loop_messages = messages %} + {% set system_message = '' %} +{% endif %} + +{{ bos_token + system_message }} +{% for message in loop_messages %} + 
{% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %} + {{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }} + {% endif %} + + {{ 'GPT4 Correct ' + message['role'].title() + ': ' + message['content'] + '<|end_of_turn|>' }} + + {% if loop.last and message['role'] == 'user' and add_generation_prompt %} + {{ 'GPT4 Correct Assistant:' }} + {% endif %} +{% endfor %} \ No newline at end of file diff --git a/src/llama_cpp_agent/chat_template/phi-3.jinja b/src/llama_cpp_agent/chat_template/phi-3.jinja new file mode 100644 index 0000000..842b60e --- /dev/null +++ b/src/llama_cpp_agent/chat_template/phi-3.jinja @@ -0,0 +1,17 @@ +{% if messages[0]['role'] == 'system' %} + {% set offset = 1 %} +{% else %} + {% set offset = 0 %} +{% endif %} + +{% for message in messages %} + {% if (message['role'] == 'user') != (loop.index0 % 2 == offset) %} + {{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }} + {% endif %} + + {{ '<|' + message['role'] + '|>\n' + message['content'].strip() + '<|end|>' + '\n' }} + + {% if loop.last and message['role'] == 'user' and add_generation_prompt %} + {{ '<|assistant|>\n' }} + {% endif %} +{% endfor %} \ No newline at end of file diff --git a/src/llama_cpp_agent/chat_template/saiga.jinja b/src/llama_cpp_agent/chat_template/saiga.jinja new file mode 100644 index 0000000..671e94f --- /dev/null +++ b/src/llama_cpp_agent/chat_template/saiga.jinja @@ -0,0 +1,23 @@ +{% if messages[0]['role'] == 'system' %} + {% set loop_messages = messages[1:] %} + {% set system_message = bos_token + 'system' + '\n' + messages[0]['content'].strip() + eos_token %} +{% else %} + {% set loop_messages = messages %} + {% set system_message = '' %} +{% endif %} + +{% for message in loop_messages %} + {% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %} + {{ raise_exception('Conversation roles must alternate user/bot/user/bot/...') }} + {% endif %} + + {% if loop.index0 == 0 %} + {{ system_message }} + {% endif %} + + {{ bos_token + message['role'] + '\n' + message['content'].strip() + eos_token }} + + {% if loop.last and message['role'] == 'user' and add_generation_prompt %} + {{ bos_token + 'bot' }} + {% endif %} +{% endfor %} \ No newline at end of file diff --git a/src/llama_cpp_agent/chat_template/solar.jinja b/src/llama_cpp_agent/chat_template/solar.jinja new file mode 100644 index 0000000..2cf7fef --- /dev/null +++ b/src/llama_cpp_agent/chat_template/solar.jinja @@ -0,0 +1,18 @@ +{% if messages[0]['role'] == 'system' %} + {% set offset = 1 %} +{% else %} + {% set offset = 0 %} +{% endif %} + +{{ bos_token }} +{% for message in messages %} + {% if (message['role'] == 'user') != (loop.index0 % 2 == offset) %} + {{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }} + {% endif %} + + {{ '### ' + message['role'].title() + ':\n' + message['content'].strip() + '\n\n' }} + + {% if loop.last and message['role'] == 'user' and add_generation_prompt %} + {{ '### Assistant:\n' }} + {% endif %} +{% endfor %} \ No newline at end of file diff --git a/src/llama_cpp_agent/chat_template/vicuna.jinja b/src/llama_cpp_agent/chat_template/vicuna.jinja new file mode 100644 index 0000000..bbed85a --- /dev/null +++ b/src/llama_cpp_agent/chat_template/vicuna.jinja @@ -0,0 +1,24 @@ +{% if messages[0]['role'] == 'system' %} + {% set loop_messages = messages[1:] %} + {% set system_message = messages[0]['content'].strip() + '\n\n' %} +{% else %} + {% set loop_messages = messages %} + {% set 
system_message = '' %} +{% endif %} + +{{ bos_token + system_message }} +{% for message in loop_messages %} + {% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %} + {{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }} + {% endif %} + + {% if message['role'] == 'user' %} + {{ 'USER: ' + message['content'].strip() + '\n' }} + {% elif message['role'] == 'assistant' %} + {{ 'ASSISTANT: ' + message['content'].strip() + eos_token + '\n' }} + {% endif %} + + {% if loop.last and message['role'] == 'user' and add_generation_prompt %} + {{ 'ASSISTANT:' }} + {% endif %} +{% endfor %} \ No newline at end of file diff --git a/src/llama_cpp_agent/chat_template/zephyr.jinja b/src/llama_cpp_agent/chat_template/zephyr.jinja new file mode 100644 index 0000000..dc70df8 --- /dev/null +++ b/src/llama_cpp_agent/chat_template/zephyr.jinja @@ -0,0 +1,17 @@ +{% if messages[0]['role'] == 'system' %} + {% set offset = 1 %} +{% else %} + {% set offset = 0 %} +{% endif %} + +{% for message in messages %} + {% if (message['role'] == 'user') != (loop.index0 % 2 == offset) %} + {{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }} + {% endif %} + + {{ '<|' + message['role'] + '|>\n' + message['content'].strip() + eos_token + '\n' }} + + {% if loop.last and message['role'] == 'user' and add_generation_prompt %} + {{ '<|assistant|>\n' }} + {% endif %} +{% endfor %} \ No newline at end of file From a08ddafc3f4d85d3fc6e517357528691bf8ee8a4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?P=CE=94BL=C3=98=20=E1=84=83=CE=9E?= Date: Mon, 6 May 2024 22:46:58 -0500 Subject: [PATCH 3/5] docs comments --- src/llama_cpp_agent/chat_template.py | 32 ++++++++++++++-------------- 1 file changed, 16 insertions(+), 16 deletions(-) diff --git a/src/llama_cpp_agent/chat_template.py b/src/llama_cpp_agent/chat_template.py index 5dca324..86eec46 100644 --- a/src/llama_cpp_agent/chat_template.py +++ b/src/llama_cpp_agent/chat_template.py @@ -3,11 +3,11 @@ from jinja2 import Template def raise_exception(message): - """Function raise exception.""" + """raise exception.""" raise ValueError(message) def alpaca_template(messages, add_generation_prompt=True) -> str: - """Function alpaca template.""" + """Alpaca template.""" with open('chat_template/alpaca.jinja', 'r', encoding="utf-8") as file: chat_template = file.read() @@ -24,7 +24,7 @@ def alpaca_template(messages, add_generation_prompt=True) -> str: return output.strip() def amberchat_template(messages, add_generation_prompt=True) -> str: - """Function amberchat template.""" + """Amberchat template.""" with open('chat_template/amberchat.jinja', 'r', encoding="utf-8") as file: chat_template = file.read() @@ -41,7 +41,7 @@ def amberchat_template(messages, add_generation_prompt=True) -> str: return output.strip() def chatml_template(messages, add_generation_prompt=True) -> str: - """Function chatml template.""" + """Chatml template.""" with open('chat_template/chatml.jinja', 'r', encoding="utf-8") as file: chat_template = file.read() @@ -58,7 +58,7 @@ def chatml_template(messages, add_generation_prompt=True) -> str: return output.strip() def chatqa_template(messages, add_generation_prompt=True) -> str: - """Function chatqa template.""" + """Chatqa template.""" with open('chat_template/chatqa.jinja', 'r', encoding="utf-8") as file: chat_template = file.read() @@ -75,7 +75,7 @@ def chatqa_template(messages, add_generation_prompt=True) -> str: return output.strip() def falcon_template(messages, add_generation_prompt=True) 
-> str: - """Function falcon template.""" + """Falcon template.""" with open('chat_template/falcon.jinja', 'r', encoding="utf-8") as file: chat_template = file.read() @@ -92,7 +92,7 @@ def falcon_template(messages, add_generation_prompt=True) -> str: return output.strip() def gemma_template(messages, add_generation_prompt=True) -> str: - """Function gemma template.""" + """Gemma template.""" with open('chat_template/gemma.jinja', 'r', encoding="utf-8") as file: chat_template = file.read() @@ -109,7 +109,7 @@ def gemma_template(messages, add_generation_prompt=True) -> str: return output.strip() def llama_2_template(messages, add_generation_prompt=True) -> str: - """Function llama-2 template.""" + """Llama-2 template.""" with open('chat_template/llama-2.jinja', 'r', encoding="utf-8") as file: chat_template = file.read() @@ -126,7 +126,7 @@ def llama_2_template(messages, add_generation_prompt=True) -> str: return output.strip() def llama_3_template(messages, add_generation_prompt=True): - """Function llama-3 template.""" + """Llama-3 template.""" with open('chat_template/llama-3.jinja', 'r', encoding="utf-8") as file: chat_template = file.read() @@ -143,7 +143,7 @@ def llama_3_template(messages, add_generation_prompt=True): return output.strip() def mistral_template(messages, add_generation_prompt=True): - """Function mistral instruct template.""" + """Mistral instruct template.""" with open('chat_template/mistral.jinja', 'r', encoding="utf-8") as file: chat_template = file.read() @@ -160,7 +160,7 @@ def mistral_template(messages, add_generation_prompt=True): return output.strip() def openchat_template(messages, add_generation_prompt=True) -> str: - """Function openchat template.""" + """Openchat template.""" with open('chat_template/openchat.jinja', 'r', encoding="utf-8") as file: chat_template = file.read() @@ -177,7 +177,7 @@ def openchat_template(messages, add_generation_prompt=True) -> str: return output.strip() def phi_3_template(messages, add_generation_prompt=True) -> str: - """Function phi-3 template.""" + """Phi-3 template.""" with open('chat_template/phi-3.jinja', 'r', encoding="utf-8") as file: chat_template = file.read() @@ -194,7 +194,7 @@ def phi_3_template(messages, add_generation_prompt=True) -> str: return output.strip() def saiga_template(messages, add_generation_prompt=True) -> str: - """Function saiga template.""" + """Saiga template.""" with open('chat_template/saiga.jinja', 'r', encoding="utf-8") as file: chat_template = file.read() @@ -211,7 +211,7 @@ def saiga_template(messages, add_generation_prompt=True) -> str: return output.strip() def solar_template(messages, add_generation_prompt=True) -> str: - """Function solar template.""" + """Solar template.""" with open('chat_template/solar.jinja', 'r', encoding="utf-8") as file: chat_template = file.read() @@ -228,7 +228,7 @@ def solar_template(messages, add_generation_prompt=True) -> str: return output.strip() def vicuna_template(messages, add_generation_prompt=True) -> str: - """Function vicuna template.""" + """Vicuna template.""" with open('chat_template/vicuna.jinja', 'r', encoding="utf-8") as file: chat_template = file.read() @@ -245,7 +245,7 @@ def vicuna_template(messages, add_generation_prompt=True) -> str: return output.strip() def zephyr_template(messages, add_generation_prompt=True) -> str: - """Function zephyr template.""" + """Zephyr template.""" with open('chat_template/zephyr.jinja', 'r', encoding="utf-8") as file: chat_template = file.read() From 3a872aa627609bf4b7ea8d04088007f0202c19d8 Mon Sep 17 
00:00:00 2001 From: =?UTF-8?q?P=CE=94BL=C3=98=20=E1=84=83=CE=9E?= Date: Tue, 7 May 2024 09:37:38 -0500 Subject: [PATCH 4/5] add: PR event --- .github/workflows/python-ci.yml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.github/workflows/python-ci.yml b/.github/workflows/python-ci.yml index a37528e..c0d4aa7 100644 --- a/.github/workflows/python-ci.yml +++ b/.github/workflows/python-ci.yml @@ -2,8 +2,9 @@ name: Python-CI on: push: - branches: - - master + branches: [ "master" ] + pull_request: + branches: [ "master" ] release: types: [published] From 1073f7adab2294a86f698717019c97ceb51e0f8e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?P=CE=94BL=C3=98=20=E1=84=83=CE=9E?= Date: Tue, 7 May 2024 09:38:57 -0500 Subject: [PATCH 5/5] remove chat template for PR --- src/llama_cpp_agent/chat_template.py | 262 ------------------ .../chat_template/alpaca.jinja | 24 -- .../chat_template/amberchat.jinja | 26 -- .../chat_template/chatml.jinja | 18 -- .../chat_template/chatqa.jinja | 36 --- .../chat_template/falcon.jinja | 22 -- src/llama_cpp_agent/chat_template/gemma.jinja | 31 --- .../chat_template/llama-2.jinja | 25 -- .../chat_template/llama-3.jinja | 24 -- .../chat_template/mistral.jinja | 26 -- .../chat_template/openchat.jinja | 20 -- src/llama_cpp_agent/chat_template/phi-3.jinja | 17 -- src/llama_cpp_agent/chat_template/saiga.jinja | 23 -- src/llama_cpp_agent/chat_template/solar.jinja | 18 -- .../chat_template/vicuna.jinja | 24 -- .../chat_template/zephyr.jinja | 17 -- 16 files changed, 613 deletions(-) delete mode 100644 src/llama_cpp_agent/chat_template.py delete mode 100644 src/llama_cpp_agent/chat_template/alpaca.jinja delete mode 100644 src/llama_cpp_agent/chat_template/amberchat.jinja delete mode 100644 src/llama_cpp_agent/chat_template/chatml.jinja delete mode 100644 src/llama_cpp_agent/chat_template/chatqa.jinja delete mode 100644 src/llama_cpp_agent/chat_template/falcon.jinja delete mode 100644 src/llama_cpp_agent/chat_template/gemma.jinja delete mode 100644 src/llama_cpp_agent/chat_template/llama-2.jinja delete mode 100644 src/llama_cpp_agent/chat_template/llama-3.jinja delete mode 100644 src/llama_cpp_agent/chat_template/mistral.jinja delete mode 100644 src/llama_cpp_agent/chat_template/openchat.jinja delete mode 100644 src/llama_cpp_agent/chat_template/phi-3.jinja delete mode 100644 src/llama_cpp_agent/chat_template/saiga.jinja delete mode 100644 src/llama_cpp_agent/chat_template/solar.jinja delete mode 100644 src/llama_cpp_agent/chat_template/vicuna.jinja delete mode 100644 src/llama_cpp_agent/chat_template/zephyr.jinja diff --git a/src/llama_cpp_agent/chat_template.py b/src/llama_cpp_agent/chat_template.py deleted file mode 100644 index 86eec46..0000000 --- a/src/llama_cpp_agent/chat_template.py +++ /dev/null @@ -1,262 +0,0 @@ -""" chat template function handler""" - -from jinja2 import Template - -def raise_exception(message): - """raise exception.""" - raise ValueError(message) - -def alpaca_template(messages, add_generation_prompt=True) -> str: - """Alpaca template.""" - with open('chat_template/alpaca.jinja', 'r', encoding="utf-8") as file: - chat_template = file.read() - - # Create a Jinja template object - template = Template(chat_template) - - # Render the template with the messages and add_generation_prompt - output = template.render( - messages=messages, - add_generation_prompt=add_generation_prompt, - raise_exception=raise_exception - ) - - return output.strip() - -def amberchat_template(messages, add_generation_prompt=True) -> str: - """Amberchat 
template.""" - with open('chat_template/amberchat.jinja', 'r', encoding="utf-8") as file: - chat_template = file.read() - - # Create a Jinja template object - template = Template(chat_template) - - # Render the template with the messages and add_generation_prompt - output = template.render( - messages=messages, - add_generation_prompt=add_generation_prompt, - raise_exception=raise_exception - ) - - return output.strip() - -def chatml_template(messages, add_generation_prompt=True) -> str: - """Chatml template.""" - with open('chat_template/chatml.jinja', 'r', encoding="utf-8") as file: - chat_template = file.read() - - # Create a Jinja template object - template = Template(chat_template) - - # Render the template with the messages and add_generation_prompt - output = template.render( - messages=messages, - add_generation_prompt=add_generation_prompt, - raise_exception=raise_exception - ) - - return output.strip() - -def chatqa_template(messages, add_generation_prompt=True) -> str: - """Chatqa template.""" - with open('chat_template/chatqa.jinja', 'r', encoding="utf-8") as file: - chat_template = file.read() - - # Create a Jinja template object - template = Template(chat_template) - - # Render the template with the messages and add_generation_prompt - output = template.render( - messages=messages, - add_generation_prompt=add_generation_prompt, - raise_exception=raise_exception - ) - - return output.strip() - -def falcon_template(messages, add_generation_prompt=True) -> str: - """Falcon template.""" - with open('chat_template/falcon.jinja', 'r', encoding="utf-8") as file: - chat_template = file.read() - - # Create a Jinja template object - template = Template(chat_template) - - # Render the template with the messages and add_generation_prompt - output = template.render( - messages=messages, - add_generation_prompt=add_generation_prompt, - raise_exception=raise_exception - ) - - return output.strip() - -def gemma_template(messages, add_generation_prompt=True) -> str: - """Gemma template.""" - with open('chat_template/gemma.jinja', 'r', encoding="utf-8") as file: - chat_template = file.read() - - # Create a Jinja template object - template = Template(chat_template) - - # Render the template with the messages and add_generation_prompt - output = template.render( - messages=messages, - add_generation_prompt=add_generation_prompt, - raise_exception=raise_exception - ) - - return output.strip() - -def llama_2_template(messages, add_generation_prompt=True) -> str: - """Llama-2 template.""" - with open('chat_template/llama-2.jinja', 'r', encoding="utf-8") as file: - chat_template = file.read() - - # Create a Jinja template object - template = Template(chat_template) - - # Render the template with the messages and add_generation_prompt - output = template.render( - messages=messages, - add_generation_prompt=add_generation_prompt, - raise_exception=raise_exception - ) - - return output.strip() - -def llama_3_template(messages, add_generation_prompt=True): - """Llama-3 template.""" - with open('chat_template/llama-3.jinja', 'r', encoding="utf-8") as file: - chat_template = file.read() - - # Create a Jinja template object - template = Template(chat_template) - - # Render the template with the messages and add_generation_prompt - output = template.render( - messages=messages, - add_generation_prompt=add_generation_prompt, - raise_exception=raise_exception - ) - - return output.strip() - -def mistral_template(messages, add_generation_prompt=True): - """Mistral instruct template.""" - with 
open('chat_template/mistral.jinja', 'r', encoding="utf-8") as file: - chat_template = file.read() - - # Create a Jinja template object - template = Template(chat_template) - - # Render the template with the messages and add_generation_prompt - output = template.render( - messages=messages, - add_generation_prompt=add_generation_prompt, - raise_exception=raise_exception - ) - - return output.strip() - -def openchat_template(messages, add_generation_prompt=True) -> str: - """Openchat template.""" - with open('chat_template/openchat.jinja', 'r', encoding="utf-8") as file: - chat_template = file.read() - - # Create a Jinja template object - template = Template(chat_template) - - # Render the template with the messages and add_generation_prompt - output = template.render( - messages=messages, - add_generation_prompt=add_generation_prompt, - raise_exception=raise_exception - ) - - return output.strip() - -def phi_3_template(messages, add_generation_prompt=True) -> str: - """Phi-3 template.""" - with open('chat_template/phi-3.jinja', 'r', encoding="utf-8") as file: - chat_template = file.read() - - # Create a Jinja template object - template = Template(chat_template) - - # Render the template with the messages and add_generation_prompt - output = template.render( - messages=messages, - add_generation_prompt=add_generation_prompt, - raise_exception=raise_exception - ) - - return output.strip() - -def saiga_template(messages, add_generation_prompt=True) -> str: - """Saiga template.""" - with open('chat_template/saiga.jinja', 'r', encoding="utf-8") as file: - chat_template = file.read() - - # Create a Jinja template object - template = Template(chat_template) - - # Render the template with the messages and add_generation_prompt - output = template.render( - messages=messages, - add_generation_prompt=add_generation_prompt, - raise_exception=raise_exception - ) - - return output.strip() - -def solar_template(messages, add_generation_prompt=True) -> str: - """Solar template.""" - with open('chat_template/solar.jinja', 'r', encoding="utf-8") as file: - chat_template = file.read() - - # Create a Jinja template object - template = Template(chat_template) - - # Render the template with the messages and add_generation_prompt - output = template.render( - messages=messages, - add_generation_prompt=add_generation_prompt, - raise_exception=raise_exception - ) - - return output.strip() - -def vicuna_template(messages, add_generation_prompt=True) -> str: - """Vicuna template.""" - with open('chat_template/vicuna.jinja', 'r', encoding="utf-8") as file: - chat_template = file.read() - - # Create a Jinja template object - template = Template(chat_template) - - # Render the template with the messages and add_generation_prompt - output = template.render( - messages=messages, - add_generation_prompt=add_generation_prompt, - raise_exception=raise_exception - ) - - return output.strip() - -def zephyr_template(messages, add_generation_prompt=True) -> str: - """Zephyr template.""" - with open('chat_template/zephyr.jinja', 'r', encoding="utf-8") as file: - chat_template = file.read() - - # Create a Jinja template object - template = Template(chat_template) - - # Render the template with the messages and add_generation_prompt - output = template.render( - messages=messages, - add_generation_prompt=add_generation_prompt, - raise_exception=raise_exception - ) - - return output.strip() diff --git a/src/llama_cpp_agent/chat_template/alpaca.jinja b/src/llama_cpp_agent/chat_template/alpaca.jinja deleted file mode 100644 index 
def04b3..0000000 --- a/src/llama_cpp_agent/chat_template/alpaca.jinja +++ /dev/null @@ -1,24 +0,0 @@ -{% if messages[0]['role'] == 'system' %} - {% set loop_messages = messages[1:] %} - {% set system_message = messages[0]['content'].strip() + '\n\n' %} -{% else %} - {% set loop_messages = messages %} - {% set system_message = '' %} -{% endif %} - -{{ bos_token + system_message }} -{% for message in loop_messages %} - {% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %} - {{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }} - {% endif %} - - {% if message['role'] == 'user' %} - {{ '### Instruction:\n' + message['content'].strip() + '\n\n' }} - {% elif message['role'] == 'assistant' %} - {{ '### Response:\n' + message['content'].strip() + eos_token + '\n\n' }} - {% endif %} - - {% if loop.last and message['role'] == 'user' and add_generation_prompt %} - {{ '### Instruction:\n' }} - {% endif %} -{% endfor %} \ No newline at end of file diff --git a/src/llama_cpp_agent/chat_template/amberchat.jinja b/src/llama_cpp_agent/chat_template/amberchat.jinja deleted file mode 100644 index 7b746b6..0000000 --- a/src/llama_cpp_agent/chat_template/amberchat.jinja +++ /dev/null @@ -1,26 +0,0 @@ -{% if messages[0]['role'] == 'system' %} - {% set loop_messages = messages[1:] %} - {% set system_message = messages[0]['content'].strip() + '\n\n' %} -{% else %} - {% set loop_messages = messages %} - {% set system_message = '' %} -{% endif %} - -{{ bos_token }} -{% for message in loop_messages %} - {% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %} - {{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }} - {% endif %} - - {% if loop.index0 == 0 %} - {% set content = system_message + message['content'] %} - {% else %} - {% set content = message['content'] %} - {% endif %} - - {% if message['role'] == 'user' %} - {{ '[INST] ' + content.strip() + ' [/INST]' }} - {% elif message['role'] == 'assistant' %} - {{ ' ' + content.strip() + eos_token }} - {% endif %} -{% endfor %} \ No newline at end of file diff --git a/src/llama_cpp_agent/chat_template/chatml.jinja b/src/llama_cpp_agent/chat_template/chatml.jinja deleted file mode 100644 index eaf45a6..0000000 --- a/src/llama_cpp_agent/chat_template/chatml.jinja +++ /dev/null @@ -1,18 +0,0 @@ -{% if messages[0]['role'] == 'system' %} - {% set offset = 1 %} -{% else %} - {% set offset = 0 %} -{% endif %} - -{{ bos_token }} -{% for message in messages %} - {% if (message['role'] == 'user') != (loop.index0 % 2 == offset) %} - {{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }} - {% endif %} - - {{ '<|im_start|>' + message['role'] + '\n' + message['content'].strip() + '<|im_end|>\n' }} - - {% if loop.last and message['role'] == 'user' and add_generation_prompt %} - {{ '<|im_start|>assistant\n' }} - {% endif %} -{% endfor %} \ No newline at end of file diff --git a/src/llama_cpp_agent/chat_template/chatqa.jinja b/src/llama_cpp_agent/chat_template/chatqa.jinja deleted file mode 100644 index 714e864..0000000 --- a/src/llama_cpp_agent/chat_template/chatqa.jinja +++ /dev/null @@ -1,36 +0,0 @@ -{{ bos_token }} -{% if messages[0]['role'] == 'system' %} - {% set loop_messages = messages[1:] %} - {% set system_message = 'System: ' + messages[0]['content'].strip() %} -{% else %} - {% set loop_messages = messages %} - {% set system_message = '' %} -{% endif %} - -{% if messages[0]['role'] == 'context' %} - {% set loop_messages = messages[1:] %} - {% 
set context_message = '\n\n' + messages[0]['content'].strip() %} -{% else %} - {% set loop_messages = messages %} - {% set context_message = '' %} -{% endif %} - -{% for message in loop_messages %} - {% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %} - {{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }} - {% endif %} - - {% if loop.index0 == 0 %} - {{ system_message + context_message }} - {% endif %} - - {% if message['role'] == 'user' %} - {{ '\n\nUser: ' + content.strip() }} - {% elif message['role'] == 'assistant' %} - {{ '\n\nAssistant: ' + content.strip() }} - {% endif %} - - {% if loop.last and message['role'] == 'user' and add_generation_prompt %} - {{ '\n\nAssistant:' }} - {% endif %} -{% endfor %} \ No newline at end of file diff --git a/src/llama_cpp_agent/chat_template/falcon.jinja b/src/llama_cpp_agent/chat_template/falcon.jinja deleted file mode 100644 index 70f7cdf..0000000 --- a/src/llama_cpp_agent/chat_template/falcon.jinja +++ /dev/null @@ -1,22 +0,0 @@ -{% if messages[0]['role'] == 'system' %} - {% set loop_messages = messages[1:] %} - {% set system_message = messages[0]['content'] %} -{% else %} - {% set loop_messages = messages %} - {% set system_message = '' %} -{% endif %} - -{% for message in loop_messages %} - {% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %} - {{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }} - {% endif %} - - {% if loop.index0 == 0 %} - {{ system_message.strip() }} - {% endif %} - {{ '\n\n' + message['role'].title() + ': ' + message['content'].strip().replace('\r\n', '\n').replace('\n\n', '\n') }} - - {% if loop.last and message['role'] == 'user' and add_generation_prompt %} - {{ '\n\nAssistant:' }} - {% endif %} -{% endfor %} \ No newline at end of file diff --git a/src/llama_cpp_agent/chat_template/gemma.jinja b/src/llama_cpp_agent/chat_template/gemma.jinja deleted file mode 100644 index 331c9bc..0000000 --- a/src/llama_cpp_agent/chat_template/gemma.jinja +++ /dev/null @@ -1,31 +0,0 @@ -{% if messages[0]['role'] == 'system' %} - {% set loop_messages = messages[1:] %} - {% set system_message = messages[0]['content'].strip() + '\n\n' %} -{% else %} - {% set loop_messages = messages %} - {% set system_message = '' %} -{% endif %} - -{% for message in loop_messages %} - {% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %} - {{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }} - {% endif %} - - {% if loop.index0 == 0 %} - {% set content = system_message + message['content'] %} - {% else %} - {% set content = message['content'] %} - {% endif %} - - {% if (message['role'] == 'assistant') %} - {% set role = 'model' %} - {% else %} - {% set role = message['role'] %} - {% endif %} - - {{ '' + role + '\n' + content.strip() + '\n' }} - - {% if loop.last and message['role'] == 'user' and add_generation_prompt %} - {{'model\n'}} - {% endif %} -{% endfor %} \ No newline at end of file diff --git a/src/llama_cpp_agent/chat_template/llama-2.jinja b/src/llama_cpp_agent/chat_template/llama-2.jinja deleted file mode 100644 index 2dcc5b4..0000000 --- a/src/llama_cpp_agent/chat_template/llama-2.jinja +++ /dev/null @@ -1,25 +0,0 @@ -{% if messages[0]['role'] == 'system' %} - {% set loop_messages = messages[1:] %} - {% set system_message = '<>\n' + messages[0]['content'].strip() + '\n<>\n\n' %} -{% else %} - {% set loop_messages = messages %} - {% set system_message = '' %} -{% endif %} - -{% for message in 
loop_messages %} - {% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %} - {{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }} - {% endif %} - - {% if loop.index0 == 0 %} - {% set content = system_message + message['content'] %} - {% else %} - {% set content = message['content'] %} - {% endif %} - - {% if message['role'] == 'user' %} - {{ bos_token + '[INST] ' + content.strip() + ' [/INST]' }} - {% elif message['role'] == 'assistant' %} - {{ ' ' + content.strip() + ' ' + eos_token }} - {% endif %} -{% endfor %} \ No newline at end of file diff --git a/src/llama_cpp_agent/chat_template/llama-3.jinja b/src/llama_cpp_agent/chat_template/llama-3.jinja deleted file mode 100644 index 70d294d..0000000 --- a/src/llama_cpp_agent/chat_template/llama-3.jinja +++ /dev/null @@ -1,24 +0,0 @@ -{{ bos_token }} -{% if messages[0]['role'] == 'system' %} - {% set loop_messages = messages[1:] %} - {% set system_message = '<|start_header_id|>' + 'system' + '<|end_header_id|>\n\n' + messages[0]['content'].strip() + '<|eot_id|>' %} -{% else %} - {% set loop_messages = messages %} - {% set system_message = '' %} -{% endif %} - -{% for message in loop_messages %} - {% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %} - {{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }} - {% endif %} - - {% if loop.index0 == 0 %} - {{ system_message }} - {% endif %} - - {{ '<|start_header_id|>' + message['role'] + '<|end_header_id|>\n\n' + message['content'].strip() + '<|eot_id|>' }} - - {% if loop.last and message['role'] == 'user' and add_generation_prompt %} - {{ '<|start_header_id|>' + 'assistant' + '<|end_header_id|>\n\n' }} - {% endif %} -{% endfor %} \ No newline at end of file diff --git a/src/llama_cpp_agent/chat_template/mistral.jinja b/src/llama_cpp_agent/chat_template/mistral.jinja deleted file mode 100644 index 7b746b6..0000000 --- a/src/llama_cpp_agent/chat_template/mistral.jinja +++ /dev/null @@ -1,26 +0,0 @@ -{% if messages[0]['role'] == 'system' %} - {% set loop_messages = messages[1:] %} - {% set system_message = messages[0]['content'].strip() + '\n\n' %} -{% else %} - {% set loop_messages = messages %} - {% set system_message = '' %} -{% endif %} - -{{ bos_token }} -{% for message in loop_messages %} - {% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %} - {{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }} - {% endif %} - - {% if loop.index0 == 0 %} - {% set content = system_message + message['content'] %} - {% else %} - {% set content = message['content'] %} - {% endif %} - - {% if message['role'] == 'user' %} - {{ '[INST] ' + content.strip() + ' [/INST]' }} - {% elif message['role'] == 'assistant' %} - {{ ' ' + content.strip() + eos_token }} - {% endif %} -{% endfor %} \ No newline at end of file diff --git a/src/llama_cpp_agent/chat_template/openchat.jinja b/src/llama_cpp_agent/chat_template/openchat.jinja deleted file mode 100644 index 5004cd2..0000000 --- a/src/llama_cpp_agent/chat_template/openchat.jinja +++ /dev/null @@ -1,20 +0,0 @@ -{% if messages[0]['role'] == 'system' %} - {% set loop_messages = messages[1:] %} - {% set system_message = messages[0]['content'].strip() + '<|end_of_turn|>' %} -{% else %} - {% set loop_messages = messages %} - {% set system_message = '' %} -{% endif %} - -{{ bos_token + system_message }} -{% for message in loop_messages %} - {% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %} - {{ raise_exception('Conversation 
roles must alternate user/assistant/user/assistant/...') }} - {% endif %} - - {{ 'GPT4 Correct ' + message['role'].title() + ': ' + message['content'] + '<|end_of_turn|>' }} - - {% if loop.last and message['role'] == 'user' and add_generation_prompt %} - {{ 'GPT4 Correct Assistant:' }} - {% endif %} -{% endfor %} \ No newline at end of file diff --git a/src/llama_cpp_agent/chat_template/phi-3.jinja b/src/llama_cpp_agent/chat_template/phi-3.jinja deleted file mode 100644 index 842b60e..0000000 --- a/src/llama_cpp_agent/chat_template/phi-3.jinja +++ /dev/null @@ -1,17 +0,0 @@ -{% if messages[0]['role'] == 'system' %} - {% set offset = 1 %} -{% else %} - {% set offset = 0 %} -{% endif %} - -{% for message in messages %} - {% if (message['role'] == 'user') != (loop.index0 % 2 == offset) %} - {{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }} - {% endif %} - - {{ '<|' + message['role'] + '|>\n' + message['content'].strip() + '<|end|>' + '\n' }} - - {% if loop.last and message['role'] == 'user' and add_generation_prompt %} - {{ '<|assistant|>\n' }} - {% endif %} -{% endfor %} \ No newline at end of file diff --git a/src/llama_cpp_agent/chat_template/saiga.jinja b/src/llama_cpp_agent/chat_template/saiga.jinja deleted file mode 100644 index 671e94f..0000000 --- a/src/llama_cpp_agent/chat_template/saiga.jinja +++ /dev/null @@ -1,23 +0,0 @@ -{% if messages[0]['role'] == 'system' %} - {% set loop_messages = messages[1:] %} - {% set system_message = bos_token + 'system' + '\n' + messages[0]['content'].strip() + eos_token %} -{% else %} - {% set loop_messages = messages %} - {% set system_message = '' %} -{% endif %} - -{% for message in loop_messages %} - {% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %} - {{ raise_exception('Conversation roles must alternate user/bot/user/bot/...') }} - {% endif %} - - {% if loop.index0 == 0 %} - {{ system_message }} - {% endif %} - - {{ bos_token + message['role'] + '\n' + message['content'].strip() + eos_token }} - - {% if loop.last and message['role'] == 'user' and add_generation_prompt %} - {{ bos_token + 'bot' }} - {% endif %} -{% endfor %} \ No newline at end of file diff --git a/src/llama_cpp_agent/chat_template/solar.jinja b/src/llama_cpp_agent/chat_template/solar.jinja deleted file mode 100644 index 2cf7fef..0000000 --- a/src/llama_cpp_agent/chat_template/solar.jinja +++ /dev/null @@ -1,18 +0,0 @@ -{% if messages[0]['role'] == 'system' %} - {% set offset = 1 %} -{% else %} - {% set offset = 0 %} -{% endif %} - -{{ bos_token }} -{% for message in messages %} - {% if (message['role'] == 'user') != (loop.index0 % 2 == offset) %} - {{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }} - {% endif %} - - {{ '### ' + message['role'].title() + ':\n' + message['content'].strip() + '\n\n' }} - - {% if loop.last and message['role'] == 'user' and add_generation_prompt %} - {{ '### Assistant:\n' }} - {% endif %} -{% endfor %} \ No newline at end of file diff --git a/src/llama_cpp_agent/chat_template/vicuna.jinja b/src/llama_cpp_agent/chat_template/vicuna.jinja deleted file mode 100644 index bbed85a..0000000 --- a/src/llama_cpp_agent/chat_template/vicuna.jinja +++ /dev/null @@ -1,24 +0,0 @@ -{% if messages[0]['role'] == 'system' %} - {% set loop_messages = messages[1:] %} - {% set system_message = messages[0]['content'].strip() + '\n\n' %} -{% else %} - {% set loop_messages = messages %} - {% set system_message = '' %} -{% endif %} - -{{ bos_token + system_message }} -{% for 
message in loop_messages %} - {% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %} - {{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }} - {% endif %} - - {% if message['role'] == 'user' %} - {{ 'USER: ' + message['content'].strip() + '\n' }} - {% elif message['role'] == 'assistant' %} - {{ 'ASSISTANT: ' + message['content'].strip() + eos_token + '\n' }} - {% endif %} - - {% if loop.last and message['role'] == 'user' and add_generation_prompt %} - {{ 'ASSISTANT:' }} - {% endif %} -{% endfor %} \ No newline at end of file diff --git a/src/llama_cpp_agent/chat_template/zephyr.jinja b/src/llama_cpp_agent/chat_template/zephyr.jinja deleted file mode 100644 index dc70df8..0000000 --- a/src/llama_cpp_agent/chat_template/zephyr.jinja +++ /dev/null @@ -1,17 +0,0 @@ -{% if messages[0]['role'] == 'system' %} - {% set offset = 1 %} -{% else %} - {% set offset = 0 %} -{% endif %} - -{% for message in messages %} - {% if (message['role'] == 'user') != (loop.index0 % 2 == offset) %} - {{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }} - {% endif %} - - {{ '<|' + message['role'] + '|>\n' + message['content'].strip() + eos_token + '\n' }} - - {% if loop.last and message['role'] == 'user' and add_generation_prompt %} - {{ '<|assistant|>\n' }} - {% endif %} -{% endfor %} \ No newline at end of file
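For reference, a minimal sketch of how the chat-template helpers introduced in PATCH 2/5 (and dropped from this PR again in PATCH 5/5) would be called. This is illustrative only, not part of the patches: the render_chat_template wrapper, the package-relative TEMPLATE_DIR lookup, and the example bos_token/eos_token values are assumptions. The helpers in the patch open 'chat_template/<name>.jinja' relative to the current working directory, so this sketch resolves the path against the module file instead.

# Illustrative sketch, not part of the patch series above.
from pathlib import Path

from jinja2 import Template

# Assumed location of the bundled .jinja files, resolved relative to this file
# rather than the current working directory.
TEMPLATE_DIR = Path(__file__).parent / "chat_template"

def raise_exception(message):
    """Raise a ValueError from inside a Jinja template."""
    raise ValueError(message)

def render_chat_template(name, messages, add_generation_prompt=True):
    """Render one of the bundled chat templates by name, e.g. 'chatml' or 'llama-3'."""
    source = (TEMPLATE_DIR / f"{name}.jinja").read_text(encoding="utf-8")
    template = Template(source)
    output = template.render(
        messages=messages,
        add_generation_prompt=add_generation_prompt,
        raise_exception=raise_exception,
        bos_token="<s>",    # placeholder values; the real tokens come from the model/tokenizer
        eos_token="</s>",
    )
    return output.strip()

if __name__ == "__main__":
    messages = [
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": "Hello!"},
    ]
    print(render_chat_template("chatml", messages))

Note that the per-template functions in PATCH 2/5 do not pass bos_token or eos_token into render(), so templates that reference those placeholders either render them as empty strings or raise a jinja2 UndefinedError where they are concatenated; supplying explicit values, as in the sketch, is one way to keep the special tokens configurable.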