Skip to content

Commit

Permalink
Refactor and make use of jinja templates.
Browse files Browse the repository at this point in the history
Signed-off-by: Adam Treat <[email protected]>
  • Loading branch information
manyoso committed Aug 12, 2024
1 parent 0d56401 commit b10d96f
Show file tree
Hide file tree
Showing 14 changed files with 245 additions and 121 deletions.
2 changes: 1 addition & 1 deletion gpt4all-chat/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -310,7 +310,7 @@ else()
PRIVATE Qt6::Quick Qt6::Svg Qt6::HttpServer Qt6::Sql Qt6::Pdf)
endif()
target_link_libraries(chat
PRIVATE llmodel)
PRIVATE llmodel jinja2cpp)


# -- install --
Expand Down
50 changes: 50 additions & 0 deletions gpt4all-chat/bravesearch.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,56 @@ QString BraveSearch::run(const QJsonObject &parameters, qint64 timeout)
return worker.response();
}

QJsonObject BraveSearch::paramSchema() const
{
    // JSON schema for brave_search's parameters. The extra "modelGenerated" /
    // "userConfigured" flags mark which values the model must produce versus
    // which come from application settings (e.g. the api key).
    static const QByteArray schemaJson = R"({
"apiKey": {
"type": "string",
"description": "The api key to use",
"required": true,
"modelGenerated": false,
"userConfigured": true
},
"query": {
"type": "string",
"description": "The query to search",
"required": true
},
"count": {
"type": "integer",
"description": "The number of excerpts to return",
"required": true,
"modelGenerated": false
}
})";

    // Parsed once and cached; the literal is authored in-source, so a parse
    // failure is a programming error.
    static const QJsonDocument schemaDoc = QJsonDocument::fromJson(schemaJson);
    Q_ASSERT(!schemaDoc.isNull() && schemaDoc.isObject());
    return schemaDoc.object();
}

QJsonObject BraveSearch::exampleParams() const
{
    // Canned example of the model-generated parameters, used to show the
    // model what a brave_search invocation looks like.
    static const QByteArray exampleJson = R"({
"query": "the 44th president of the United States"
})";
    static const QJsonDocument parsed = QJsonDocument::fromJson(exampleJson);
    Q_ASSERT(!parsed.isNull() && parsed.isObject());
    return parsed.object();
}

// Whether the tool is available for use at all; currently hard-coded on.
bool BraveSearch::isEnabled() const
{
// FIXME: Refer to mysettings
return true;
}

// Whether the tool must be invoked on every prompt rather than at the model's
// discretion; currently hard-coded off.
bool BraveSearch::forceUsage() const
{
// FIXME: Refer to mysettings
return false;
}

void BraveAPIWorker::request(const QString &apiKey, const QString &query, int count)
{
// Documentation on the brave web search:
Expand Down
10 changes: 10 additions & 0 deletions gpt4all-chat/bravesearch.h
Original file line number Diff line number Diff line change
Expand Up @@ -48,6 +48,16 @@ class BraveSearch : public Tool {
ToolEnums::Error error() const override { return m_error; }
QString errorString() const override { return m_errorString; }

QString name() const override { return tr("Brave web search"); }
QString description() const override { return tr("Search the web using brave"); }
QString function() const override { return "brave_search"; }
QJsonObject paramSchema() const override;
QJsonObject exampleParams() const override;
bool isEnabled() const override;
bool isBuiltin() const override { return true; }
bool forceUsage() const override;
bool excerpts() const override { return true; }

private:
ToolEnums::Error m_error;
QString m_errorString;
Expand Down
33 changes: 32 additions & 1 deletion gpt4all-chat/chatllm.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,8 @@
#include "localdocssearch.h"
#include "mysettings.h"
#include "network.h"
#include "tool.h"
#include "toolmodel.h"

#include <QDataStream>
#include <QDebug>
Expand All @@ -29,6 +31,7 @@
#include <cmath>
#include <cstddef>
#include <functional>
#include <jinja2cpp/template.h>
#include <limits>
#include <optional>
#include <string_view>
Expand Down Expand Up @@ -1332,7 +1335,35 @@ void ChatLLM::processSystemPrompt()
if (!isModelLoaded() || m_processedSystemPrompt || m_restoreStateFromText || m_isServer)
return;

const std::string systemPrompt = MySettings::globalInstance()->modelSystemPrompt(m_modelInfo).toStdString();
const std::string systemPromptTemplate = MySettings::globalInstance()->modelSystemPromptTemplate(m_modelInfo).toStdString();

// FIXME: This needs to be moved to settings probably and the same code used for validation
jinja2::ValuesMap params;
params.insert({"currentDate", QDate::currentDate().toString().toStdString()});

jinja2::ValuesList toolList;
int c = ToolModel::globalInstance()->count();
for (int i = 0; i < c; ++i) {
Tool *t = ToolModel::globalInstance()->get(i);
if (t->isEnabled() && !t->forceUsage())
toolList.push_back(t->jinjaValue());
}
params.insert({"toolList", toolList});

std::string systemPrompt;

jinja2::Template t;
t.Load(systemPromptTemplate);
const auto renderResult = t.RenderAsString(params);

// The GUI should not allow setting an improper template, but it is always possible someone hand
// edits the settings file to produce an improper one.
Q_ASSERT(renderResult);
if (renderResult)
systemPrompt = renderResult.value();
else
qWarning() << "ERROR: Could not parse system prompt template:" << renderResult.error().ToString();

if (QString::fromStdString(systemPrompt).trimmed().isEmpty()) {
m_processedSystemPrompt = true;
return;
Expand Down
32 changes: 32 additions & 0 deletions gpt4all-chat/localdocssearch.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
#include <QDebug>
#include <QGuiApplication>
#include <QJsonArray>
#include <QJsonDocument>
#include <QJsonObject>
#include <QThread>

Expand Down Expand Up @@ -39,6 +40,37 @@ QString LocalDocsSearch::run(const QJsonObject &parameters, qint64 timeout)
return worker.response();
}

QJsonObject LocalDocsSearch::paramSchema() const
{
    // JSON schema for localdocs_search's parameters. "modelGenerated": false
    // marks values supplied by the application (the enabled collections and
    // excerpt count) rather than by the model.
    static const QByteArray schemaJson = R"({
"collections": {
"type": "array",
"items": {
"type": "string"
},
"description": "The collections to search",
"required": true,
"modelGenerated": false,
"userConfigured": false
},
"query": {
"type": "string",
"description": "The query to search",
"required": true
},
"count": {
"type": "integer",
"description": "The number of excerpts to return",
"required": true,
"modelGenerated": false
}
})";

    // Parsed once and cached; the literal is authored in-source, so a parse
    // failure is a programming error.
    static const QJsonDocument schemaDoc = QJsonDocument::fromJson(schemaJson);
    Q_ASSERT(!schemaDoc.isNull() && schemaDoc.isObject());
    return schemaDoc.object();
}

LocalDocsWorker::LocalDocsWorker()
: QObject(nullptr)
{
Expand Down
9 changes: 9 additions & 0 deletions gpt4all-chat/localdocssearch.h
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,15 @@ class LocalDocsSearch : public Tool {
ToolEnums::Error error() const override { return m_error; }
QString errorString() const override { return m_errorString; }

QString name() const override { return tr("LocalDocs search"); }
QString description() const override { return tr("Search the local docs"); }
QString function() const override { return "localdocs_search"; }
QJsonObject paramSchema() const override;
bool isEnabled() const override { return true; }
bool isBuiltin() const override { return true; }
bool forceUsage() const override { return true; }
bool excerpts() const override { return true; }

private:
ToolEnums::Error m_error;
QString m_errorString;
Expand Down
18 changes: 9 additions & 9 deletions gpt4all-chat/modellist.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -334,15 +334,15 @@ void ModelInfo::setToolTemplate(const QString &t)
m_toolTemplate = t;
}

QString ModelInfo::systemPrompt() const
QString ModelInfo::systemPromptTemplate() const
{
return MySettings::globalInstance()->modelSystemPrompt(*this);
return MySettings::globalInstance()->modelSystemPromptTemplate(*this);
}

void ModelInfo::setSystemPrompt(const QString &p)
void ModelInfo::setSystemPromptTemplate(const QString &p)
{
if (shouldSaveMetadata()) MySettings::globalInstance()->setModelSystemPrompt(*this, p, true /*force*/);
m_systemPrompt = p;
if (shouldSaveMetadata()) MySettings::globalInstance()->setModelSystemPromptTemplate(*this, p, true /*force*/);
m_systemPromptTemplate = p;
}

QString ModelInfo::chatNamePrompt() const
Expand Down Expand Up @@ -397,7 +397,7 @@ QVariantMap ModelInfo::getFields() const
{ "repeatPenaltyTokens", m_repeatPenaltyTokens },
{ "promptTemplate", m_promptTemplate },
{ "toolTemplate", m_toolTemplate },
{ "systemPrompt", m_systemPrompt },
{ "systemPromptTemplate",m_systemPromptTemplate },
{ "chatNamePrompt", m_chatNamePrompt },
{ "suggestedFollowUpPrompt", m_suggestedFollowUpPrompt },
};
Expand Down Expand Up @@ -792,7 +792,7 @@ QVariant ModelList::dataInternal(const ModelInfo *info, int role) const
case ToolTemplateRole:
return info->toolTemplate();
case SystemPromptRole:
return info->systemPrompt();
return info->systemPromptTemplate();
case ChatNamePromptRole:
return info->chatNamePrompt();
case SuggestedFollowUpPromptRole:
Expand Down Expand Up @@ -970,7 +970,7 @@ void ModelList::updateData(const QString &id, const QVector<QPair<int, QVariant>
case ToolTemplateRole:
info->setToolTemplate(value.toString()); break;
case SystemPromptRole:
info->setSystemPrompt(value.toString()); break;
info->setSystemPromptTemplate(value.toString()); break;
case ChatNamePromptRole:
info->setChatNamePrompt(value.toString()); break;
case SuggestedFollowUpPromptRole:
Expand Down Expand Up @@ -1125,7 +1125,7 @@ QString ModelList::clone(const ModelInfo &model)
{ ModelList::RepeatPenaltyTokensRole, model.repeatPenaltyTokens() },
{ ModelList::PromptTemplateRole, model.promptTemplate() },
{ ModelList::ToolTemplateRole, model.toolTemplate() },
{ ModelList::SystemPromptRole, model.systemPrompt() },
{ ModelList::SystemPromptRole, model.systemPromptTemplate() },
{ ModelList::ChatNamePromptRole, model.chatNamePrompt() },
{ ModelList::SuggestedFollowUpPromptRole, model.suggestedFollowUpPrompt() },
};
Expand Down
9 changes: 5 additions & 4 deletions gpt4all-chat/modellist.h
Original file line number Diff line number Diff line change
Expand Up @@ -69,7 +69,7 @@ struct ModelInfo {
Q_PROPERTY(int repeatPenaltyTokens READ repeatPenaltyTokens WRITE setRepeatPenaltyTokens)
Q_PROPERTY(QString promptTemplate READ promptTemplate WRITE setPromptTemplate)
Q_PROPERTY(QString toolTemplate READ toolTemplate WRITE setToolTemplate)
Q_PROPERTY(QString systemPrompt READ systemPrompt WRITE setSystemPrompt)
Q_PROPERTY(QString systemPromptTemplate READ systemPromptTemplate WRITE setSystemPromptTemplate)
Q_PROPERTY(QString chatNamePrompt READ chatNamePrompt WRITE setChatNamePrompt)
Q_PROPERTY(QString suggestedFollowUpPrompt READ suggestedFollowUpPrompt WRITE setSuggestedFollowUpPrompt)
Q_PROPERTY(int likes READ likes WRITE setLikes)
Expand Down Expand Up @@ -181,8 +181,9 @@ struct ModelInfo {
void setPromptTemplate(const QString &t);
QString toolTemplate() const;
void setToolTemplate(const QString &t);
QString systemPrompt() const;
void setSystemPrompt(const QString &p);
QString systemPromptTemplate() const;
void setSystemPromptTemplate(const QString &p);
// FIXME (adam): The chatname and suggested follow-up should also be templates I guess?
QString chatNamePrompt() const;
void setChatNamePrompt(const QString &p);
QString suggestedFollowUpPrompt() const;
Expand Down Expand Up @@ -219,7 +220,7 @@ struct ModelInfo {
int m_repeatPenaltyTokens = 64;
QString m_promptTemplate = "### Human:\n%1\n\n### Assistant:\n";
QString m_toolTemplate = "";
QString m_systemPrompt = "### System:\nYou are an AI assistant who gives a quality response to whatever humans ask of you.\n\n";
QString m_systemPromptTemplate = "### System:\nYou are an AI assistant who gives a quality response to whatever humans ask of you.\n\n";
QString m_chatNamePrompt = "Describe the above conversation in seven words or less.";
QString m_suggestedFollowUpPrompt = "Suggest three very short factual follow-up questions that have not been answered yet or cannot be found inspired by the previous conversation and excerpts.";
friend class MySettings;
Expand Down
6 changes: 3 additions & 3 deletions gpt4all-chat/mysettings.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -194,7 +194,7 @@ void MySettings::restoreModelDefaults(const ModelInfo &info)
setModelRepeatPenaltyTokens(info, info.m_repeatPenaltyTokens);
setModelPromptTemplate(info, info.m_promptTemplate);
setModelToolTemplate(info, info.m_toolTemplate);
setModelSystemPrompt(info, info.m_systemPrompt);
setModelSystemPromptTemplate(info, info.m_systemPromptTemplate);
setModelChatNamePrompt(info, info.m_chatNamePrompt);
setModelSuggestedFollowUpPrompt(info, info.m_suggestedFollowUpPrompt);
}
Expand Down Expand Up @@ -297,7 +297,7 @@ double MySettings::modelRepeatPenalty (const ModelInfo &info) const
int MySettings::modelRepeatPenaltyTokens (const ModelInfo &info) const { return getModelSetting("repeatPenaltyTokens", info).toInt(); }
QString MySettings::modelPromptTemplate (const ModelInfo &info) const { return getModelSetting("promptTemplate", info).toString(); }
QString MySettings::modelToolTemplate (const ModelInfo &info) const { return getModelSetting("toolTemplate", info).toString(); }
QString MySettings::modelSystemPrompt (const ModelInfo &info) const { return getModelSetting("systemPrompt", info).toString(); }
// NOTE(review): despite the rename, this still reads the legacy "systemPrompt"
// settings key — presumably to keep existing saved settings working; confirm.
QString MySettings::modelSystemPromptTemplate (const ModelInfo &info) const { return getModelSetting("systemPrompt", info).toString(); }
QString MySettings::modelChatNamePrompt (const ModelInfo &info) const { return getModelSetting("chatNamePrompt", info).toString(); }
QString MySettings::modelSuggestedFollowUpPrompt(const ModelInfo &info) const { return getModelSetting("suggestedFollowUpPrompt", info).toString(); }

Expand Down Expand Up @@ -411,7 +411,7 @@ void MySettings::setModelToolTemplate(const ModelInfo &info, const QString &valu
setModelSetting("toolTemplate", info, value, force, true);
}

void MySettings::setModelSystemPrompt(const ModelInfo &info, const QString &value, bool force)
// Stores the model's system prompt template. NOTE(review): writes the legacy
// "systemPrompt" settings key — presumably kept for compatibility with
// settings saved before the rename; confirm against modelSystemPromptTemplate().
void MySettings::setModelSystemPromptTemplate(const ModelInfo &info, const QString &value, bool force)
{
setModelSetting("systemPrompt", info, value, force, true);
}
Expand Down
4 changes: 2 additions & 2 deletions gpt4all-chat/mysettings.h
Original file line number Diff line number Diff line change
Expand Up @@ -128,8 +128,8 @@ class MySettings : public QObject
Q_INVOKABLE void setModelPromptTemplate(const ModelInfo &info, const QString &value, bool force = false);
QString modelToolTemplate(const ModelInfo &info) const;
Q_INVOKABLE void setModelToolTemplate(const ModelInfo &info, const QString &value, bool force = false);
QString modelSystemPrompt(const ModelInfo &info) const;
Q_INVOKABLE void setModelSystemPrompt(const ModelInfo &info, const QString &value, bool force = false);
QString modelSystemPromptTemplate(const ModelInfo &info) const;
Q_INVOKABLE void setModelSystemPromptTemplate(const ModelInfo &info, const QString &value, bool force = false);
int modelContextLength(const ModelInfo &info) const;
Q_INVOKABLE void setModelContextLength(const ModelInfo &info, int value, bool force = false);
int modelGpuLayers(const ModelInfo &info) const;
Expand Down
30 changes: 30 additions & 0 deletions gpt4all-chat/tool.cpp
Original file line number Diff line number Diff line change
@@ -1 +1,31 @@
#include "tool.h"

#include <QJsonDocument>

// Returns a copy of a parameter-schema object restricted to the properties the
// model is expected to generate: a property is kept when it carries no
// "modelGenerated" flag (generated by default) or when the flag is true.
QJsonObject filterModelGeneratedProperties(const QJsonObject &inputObject) {
    QJsonObject filteredObject;
    // Iterate directly instead of via keys(): keys() allocates a QStringList
    // and each value(key) would re-do the lookup the iterator already has.
    for (auto it = inputObject.constBegin(); it != inputObject.constEnd(); ++it) {
        const QJsonObject propertyObject = it.value().toObject();
        if (!propertyObject.contains("modelGenerated") || propertyObject["modelGenerated"].toBool())
            filteredObject.insert(it.key(), propertyObject);
    }
    return filteredObject;
}

jinja2::Value Tool::jinjaValue() const
{
    // Compact JSON renderings for the jinja context: the schema restricted to
    // model-generated parameters, and the example invocation parameters.
    const QString schemaJson = QString::fromUtf8(
        QJsonDocument(filterModelGeneratedProperties(paramSchema())).toJson(QJsonDocument::Compact));
    const QString exampleJson = QString::fromUtf8(
        QJsonDocument(exampleParams()).toJson(QJsonDocument::Compact));

    // Expose the tool's identity and parameter documentation to templates.
    return jinja2::ValuesMap {
        { "name", name().toStdString() },
        { "description", description().toStdString() },
        { "function", function().toStdString() },
        { "paramSchema", schemaJson.toStdString() },
        { "exampleParams", exampleJson.toStdString() }
    };
}
Loading

0 comments on commit b10d96f

Please sign in to comment.