Skip to content

Commit 16592c0

Browse files
committed
chore: improve gpt command and use a Redis cache to prevent generating the same thing again
1 parent a534c5a commit 16592c0

File tree

4 files changed

+142
-42
lines changed

4 files changed

+142
-42
lines changed

helpers/gpt.js

Lines changed: 28 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,28 +1,51 @@
11
import OpenAI from 'openai';
2+
import Redis from 'ioredis';
3+
import crypto from 'crypto';
24

35
/**
 * Build the user-message text for GPT. When previous answers exist they are
 * appended so the model can be steered away from repeating them.
 *
 * @param {string} something - thing that will get a random of it
 * @param {?string} lastAnswers - last answers to generate a new thing (may be null/empty)
 * @returns {string} the prompt for gpt
 */
function getSomethingToAsk(something, lastAnswers) {
  return lastAnswers ?
    `${something} (last answer: ${lastAnswers})` :
    `${something}`;
}
1016

1117
/**
 * Ask GPT for a random "something", using Redis to remember previous answers
 * for the same request so the model is told not to repeat them.
 *
 * @param {string} something - thing to pick a random instance of (e.g. 'animal')
 * @returns {Promise<string>} the model's answer, or a fallback message when
 *     the completion returns no choices
 */
// eslint-disable-next-line require-jsdoc
export async function getRandomFromGpt(something) {
  // Cache key: md5 of the request text; the value is the comma-separated
  // history of answers already given for this exact request.
  const somethingMd5 = crypto.createHash('md5').update(something).digest('hex');
  const client = new Redis(process.env.REDIS_URL);
  try {
    const lastAnswers = await client.get(somethingMd5);
    const openai = new OpenAI({
      // process.env is always defined; no optional chaining needed.
      apiKey: process.env.OPENAI_API_KEY,
    });

    const completion = await openai.chat.completions.create({
      model: 'gpt-4o-mini',
      messages: [
        {
          role: 'system', content: 'You are a system that generate randomly thing from user input. ' +
          'If user dont ask how many thing to generate, just give 1 result' +
          'don\'t use common answer.' +
          'if user says animals, you list 1000 animals from the database and pick at random,' +
          'to the point, no say \'sure\'' +
          '',
        },
        {role: 'user', content: getSomethingToAsk(something, lastAnswers)},
      ],
      temperature: 1,
      max_tokens: 75,
    });

    let answer = 'Sorry I don\'t know what you mean';
    if (completion?.choices.length > 0) {
      answer = completion.choices.map((a) => a.message.content.replace(/(\r\n|\n|\r)/gm, '')).join('\n');
    }
    // Append this answer to the history so future calls avoid repeating it.
    // NOTE(review): the stored string grows without bound — consider capping
    // the history length or setting a TTL on the key.
    await client.set(somethingMd5, lastAnswers ? lastAnswers + ', ' + answer : answer);
    return answer;
  } finally {
    // A fresh connection is opened on every call; without quit() each
    // invocation leaks one Redis connection.
    await client.quit();
  }
}
51+

index.js

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,7 @@ export async function app(req, res) {
2929
console.log(JSON.stringify(event));
3030
console.log(event.type,
3131
event.common?.invokedFunction || event.message?.slashCommand?.commandId || event.message?.argumentText,
32-
event.user.displayName, event.user.email, event.space.type, event.space.name, event.threadKey);
32+
event.user?.displayName, event.user?.email, event.space.type, event.space.name, event.threadKey);
3333
event.threadKey = event.threadKey ?? event.message?.thread?.name;
3434
let reply = {};
3535
// Dispatch slash and action events

package.json

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -12,6 +12,7 @@
1212
"dependencies": {
1313
"@google-cloud/tasks": "^3.1.2",
1414
"googleapis": "^118.0.0",
15+
"ioredis": "^5.4.1",
1516
"openai": "^4.68.4"
1617
},
1718
"devDependencies": {

0 commit comments

Comments
 (0)