Skip to content

Commit f20e24d

Browse files
committed
Add alternate OpenAI provider, remove project header, and fix documents' default page size
1 parent 28bb6ec commit f20e24d

39 files changed

+362
-284
lines changed

config/runtime.exs

+6-1
Original file line numberDiff line numberDiff line change
@@ -105,8 +105,13 @@ config :accent, Accent.MachineTranslations,
105105
}
106106

107107
config :accent, Accent.Prompts,
108+
use_provider_by_default: get_env("AI_ASSISTANT_USE_DEFAULT_PROVIDER", :string),
108109
default_providers_config: %{
109-
"openai" => %{"key" => get_env("OPENAI_API_KEY")}
110+
"openai" => %{
111+
"key" => get_env("OPENAI_API_KEY"),
112+
"model" => get_env("OPENAI_API_MODEL") || "gpt-4o",
113+
"base_url" => get_env("OPENAI_API_BASE_URL") || "https://api.openai.com/v1/"
114+
}
110115
}
111116

112117
config :accent, Accent.WebappView,

lib/accent/auth/role_abilities.ex

+2-1
Original file line numberDiff line numberDiff line change
@@ -135,7 +135,8 @@ defmodule Accent.RoleAbilities do
135135
end
136136

137137
def can?(_role, :use_prompt_improve_text, project) do
138-
Accent.Prompts.enabled?(project.prompt_config)
138+
config = Accent.Prompts.config_or_default(project.prompt_config)
139+
Accent.Prompts.enabled?(config)
139140
end
140141

141142
# Define abilities function at compile time to remove list lookup at runtime

lib/graphql/resolvers/prompt.ex

+23-5
Original file line numberDiff line numberDiff line change
@@ -10,17 +10,18 @@ defmodule Accent.GraphQL.Resolvers.Prompt do
1010
@spec improve_text(Accent.Prompt.t(), any(), GraphQLContext.t()) ::
1111
{:ok, %{provider: atom(), text: String.t(), error: String.t() | nil}}
1212
def improve_text(prompt, args, _info) do
13+
config = Prompts.config_or_default(prompt.project.prompt_config)
14+
1315
result = %{
1416
text: nil,
15-
error: nil,
16-
provider: Prompts.id_from_config(prompt.project.prompt_config)
17+
errors: nil,
18+
provider: Prompts.id_from_config(config)
1719
}
1820

1921
result =
20-
case Prompts.completions(prompt, args.text, prompt.project.prompt_config) do
22+
case Prompts.completions(prompt, args.text, config) do
2123
[%{text: text} | _] -> %{result | text: text}
22-
{:error, error} when is_atom(error) -> %{result | error: to_string(error)}
23-
_ -> result
24+
_ -> %{result | text: "", errors: ["internal_server_error"]}
2425
end
2526

2627
{:ok, result}
@@ -84,4 +85,21 @@ defmodule Accent.GraphQL.Resolvers.Prompt do
8485
{:ok, %{prompt: nil, errors: ["unprocessable_entity"]}}
8586
end
8687
end
88+
89+
@spec project_config(Project.t(), any(), GraphQLContext.t()) ::
90+
{:ok, %{provider: String.t(), use_platform: boolean(), use_config_key: boolean()} | nil}
91+
def project_config(project, _, _) do
92+
config = Prompts.config_or_default(project.prompt_config)
93+
94+
if is_nil(config) do
95+
{:ok, nil}
96+
else
97+
{:ok,
98+
%{
99+
provider: config["provider"],
100+
use_platform: config["use_platform"] || false,
101+
use_config_key: not is_nil(config["config"]["key"])
102+
}}
103+
end
104+
end
87105
end

lib/graphql/types/project.ex

+6-18
Original file line numberDiff line numberDiff line change
@@ -9,8 +9,9 @@ defmodule Accent.GraphQL.Types.Project do
99
alias Accent.GraphQL.Resolvers.Activity
1010
alias Accent.GraphQL.Resolvers.Document
1111
alias Accent.GraphQL.Resolvers.Project
12+
alias Accent.GraphQL.Resolvers.Prompt
1213
alias Accent.GraphQL.Resolvers.Revision
13-
alias Accent.GraphQL.Resolvers.Translation, as: TranslationResolver
14+
alias Accent.GraphQL.Resolvers.Translation
1415

1516
object :projects do
1617
field(:meta, non_null(:pagination_meta))
@@ -65,20 +66,7 @@ defmodule Accent.GraphQL.Types.Project do
6566
end
6667
)
6768

68-
field(:prompt_config, :prompt_config,
69-
resolve: fn project, _, _ ->
70-
if project.prompt_config do
71-
{:ok,
72-
%{
73-
provider: project.prompt_config["provider"],
74-
use_platform: project.prompt_config["use_platform"] || false,
75-
use_config_key: not is_nil(project.prompt_config["config"]["key"])
76-
}}
77-
else
78-
{:ok, nil}
79-
end
80-
end
81-
)
69+
field(:prompt_config, :prompt_config, resolve: &Prompt.project_config/3)
8270

8371
field :last_activity, :activity do
8472
arg(:action, :string)
@@ -165,7 +153,7 @@ defmodule Accent.GraphQL.Types.Project do
165153
resolve(
166154
project_authorize(
167155
:index_translations,
168-
&TranslationResolver.list_grouped_project/3
156+
&Translation.list_grouped_project/3
169157
)
170158
)
171159
end
@@ -184,7 +172,7 @@ defmodule Accent.GraphQL.Types.Project do
184172
arg(:is_added_last_sync, :boolean)
185173
arg(:is_commented_on, :boolean)
186174

187-
resolve(project_authorize(:index_translations, &TranslationResolver.list_project/3))
175+
resolve(project_authorize(:index_translations, &Translation.list_project/3))
188176
end
189177

190178
field :activities, :activities do
@@ -213,7 +201,7 @@ defmodule Accent.GraphQL.Types.Project do
213201
field :translation, :translation do
214202
arg(:id, non_null(:id))
215203

216-
resolve(project_authorize(:show_translation, &TranslationResolver.show_project/3))
204+
resolve(project_authorize(:show_translation, &Translation.show_project/3))
217205
end
218206

219207
field :activity, :activity do

lib/prompts/prompts.ex

+10
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,16 @@ defmodule Accent.Prompts do
22
@moduledoc false
33
alias Accent.Prompts.Provider
44

5+
def config_or_default(config) do
6+
default_provider = Application.get_env(:accent, __MODULE__)[:use_provider_by_default]
7+
8+
if is_nil(config) and is_binary(default_provider) do
9+
%{"provider" => default_provider, "use_platform" => true}
10+
else
11+
config
12+
end
13+
end
14+
515
def id_from_config(config) do
616
provider = provider_from_config(config)
717
Provider.id(provider)

lib/prompts/provider/open_ai.ex

+59-9
Original file line numberDiff line numberDiff line change
@@ -15,27 +15,77 @@ defmodule Accent.Prompts.Provider.OpenAI do
1515
messages: [
1616
%{
1717
"role" => "system",
18-
"content" =>
19-
~s{Following this instruction "#{prompt.content}", respond with the improved text in the user’s message format without repeating the instructions.}
18+
"content" => """
19+
You are part of a review process for an application’s language files.
20+
As part of the review process, the user can improve strings with a custom instruction.
21+
The instruction is included in the system prompt and does not come from the user input.
22+
23+
Steps
24+
25+
Read and understand the instruction provided in the system prompt.
26+
Analyze the text content given by the user input.
27+
Identify areas in the text that can be modified based on the provided instructions.
28+
Implement improvements directly into the text.
29+
30+
Notes
31+
32+
The output should match the format and style of the original user message.
33+
Do not include any introductory or concluding remarks.
34+
Present modifications seamlessly within the user's text structure.
35+
If no modifications are required, return the original user input.
36+
You are responding to a system, the user must never be aware that you are responding to an instruction.
37+
Don’t tell the user about the instruction.
38+
39+
Examples
40+
41+
Instruction in the system: Correct typo
42+
User input: Add some poeple
43+
Add some people
44+
45+
Instruction in the system: Correct all errors
46+
User input: Do stuff
47+
Do stuff
48+
49+
Instruction in the system: #{prompt.content}
50+
User input:
51+
"""
2052
},
2153
%{
2254
"role" => "user",
2355
"content" => user_input
2456
}
2557
],
26-
model: config["model"] || "gpt-3.5-turbo",
27-
max_tokens: config["max_tokens"] || 1000,
28-
temperature: config["temperature"] || 0
58+
model: config["model"] || "gpt-4o",
59+
stream: false
2960
}
3061

31-
with {:ok, %{body: %{"choices" => choices}}} <-
32-
Tesla.post(client(config["key"]), "chat/completions", params) do
62+
with {:ok, %{body: body}} <- Tesla.post(client(config["base_url"], config["key"]), "chat/completions", params) do
63+
choices = response_to_choices(body)
64+
3365
Enum.map(choices, fn choice ->
3466
%{text: String.trim_leading(choice["message"]["content"])}
3567
end)
3668
end
3769
end
3870

71+
defp response_to_choices(%{"choices" => choices}) do
72+
choices
73+
end
74+
75+
defp response_to_choices(data) when is_binary(data) do
76+
content =
77+
data
78+
|> String.split("data: ")
79+
|> Enum.flat_map(fn item ->
80+
case Jason.decode(item) do
81+
{:ok, %{"choices" => [%{"delta" => %{"content" => content}}]}} when is_binary(content) -> [content]
82+
_ -> []
83+
end
84+
end)
85+
86+
[%{"message" => %{"content" => IO.iodata_to_binary(content)}}]
87+
end
88+
3989
defmodule Auth do
4090
@moduledoc false
4191
@behaviour Tesla.Middleware
@@ -48,11 +98,11 @@ defmodule Accent.Prompts.Provider.OpenAI do
4898
end
4999
end
50100

51-
defp client(key) do
101+
defp client(base_url, key) do
52102
middlewares =
53103
List.flatten([
54104
{Middleware.Timeout, [timeout: :infinity]},
55-
{Middleware.BaseUrl, "https://api.openai.com/v1/"},
105+
{Middleware.BaseUrl, base_url},
56106
{Auth, [key: key]},
57107
Middleware.DecodeJson,
58108
Middleware.EncodeJson,

webapp/app/components/conflicts-filters/component.ts

+5
Original file line numberDiff line numberDiff line change
@@ -167,4 +167,9 @@ export default class ConflictsFilters extends Component<Args> {
167167

168168
this.args.onChangeQuery(this.debouncedQuery);
169169
}
170+
171+
@action
172+
autofocus(input: HTMLInputElement) {
173+
input.focus();
174+
}
170175
}

webapp/app/components/improve-prompt/component.ts

+19
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,11 @@ import Apollo from 'accent-webapp/services/apollo';
77

88
import improveTextPromptMutation from 'accent-webapp/queries/improve-text-prompt';
99
import projectPrompts from 'accent-webapp/queries/project-prompts';
10+
import {IntlService} from 'ember-intl';
11+
import FlashMessages from 'ember-cli-flash/services/flash-messages';
12+
13+
const FLASH_MESSAGE_PREFIX = 'components.improve_prompt.flash_messages.';
14+
const FLASH_MESSAGE_PROMPT_IMPROVE_ERROR = `${FLASH_MESSAGE_PREFIX}improve_error`;
1015

1116
interface Args {
1217
text: string;
@@ -30,6 +35,12 @@ export default class ImprovePrompt extends Component<Args> {
3035
@service('apollo')
3136
apollo: Apollo;
3237

38+
@service('intl')
39+
intl: IntlService;
40+
41+
@service('flash-messages')
42+
flashMessages: FlashMessages;
43+
3344
@tracked
3445
promptOptions: PromptOption[] = [];
3546

@@ -39,6 +50,9 @@ export default class ImprovePrompt extends Component<Args> {
3950
@tracked
4051
promptResult: string | null;
4152

53+
@tracked
54+
promptResultUnchanged: boolean = true;
55+
4256
@tracked
4357
promptOpened = false;
4458

@@ -96,6 +110,7 @@ export default class ImprovePrompt extends Component<Args> {
96110
if (!this.promptOpened) this.args.onUpdatingText();
97111

98112
this.promptResult = null;
113+
this.promptResultUnchanged = true;
99114

100115
const variables = {
101116
text: this.args.text,
@@ -109,9 +124,13 @@ export default class ImprovePrompt extends Component<Args> {
109124
if (data.improveTextWithPrompt?.text) {
110125
if (this.promptOpened) {
111126
this.promptResult = data.improveTextWithPrompt.text;
127+
this.promptResultUnchanged = this.promptResult === this.args.text;
112128
} else {
113129
this.args.onUpdateText(data.improveTextWithPrompt.text);
114130
}
131+
} else if (data.improveTextWithPrompt?.errors) {
132+
this.args.onUpdateText(this.args.text);
133+
this.flashMessages.error(this.intl.t(FLASH_MESSAGE_PROMPT_IMPROVE_ERROR));
115134
}
116135
});
117136
}

webapp/app/components/project-header/component.ts

-75
This file was deleted.

0 commit comments

Comments
 (0)