-
Notifications
You must be signed in to change notification settings - Fork 1
/
ai.js
100 lines (81 loc) · 2.4 KB
/
ai.js
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
// GPT-3 client wiring plus the conversation "personality" preset used by the AI class.
const OpenAI = require('openai-api');
// NOTE(review): `rp` is not referenced anywhere in this file — confirm it is
// actually needed before removing the dependency.
const rp = require('request-promise');
// Local config module; expected to expose the GPT3_TOKEN API key.
const config = require('./config');
const openai = new OpenAI(config.GPT3_TOKEN);
// Maximum number of stored conversation turns; once reached, the oldest
// human/AI pair is evicted (see AI.human()).
const REMEMBER = 8;
// Default preset: prompt preamble, GPT-3 sampling parameters, and the
// speaker labels used when rendering the conversation into a prompt.
const NORMAL = {
start: `The following is a conversation with an AI assistant. The assistant is helpful, creative, clever, and very friendly. \n\nHuman: Hello, who are you?\nAI: I am an AI created by OpenAI. How can I help you today?`,
maxTokens: 150,
temperature: 0.9,
topP: 1,
presencePenalty: 0.6,
frequencyPenalty: 0,
name: "AI",
otherName: "Human"
}
// Active preset; swap here to change the assistant's persona/tuning.
const AI_MODIFIER = NORMAL;
/**
 * Maintains a rolling GPT-3 conversation: accumulates human/AI turns,
 * renders them into a text prompt, and requests the next completion.
 */
class AI {
  constructor() {
    // Most recent AI reply; null until ai() records one.
    // (Fixed: the original `this.lastResponse;` was a no-op expression,
    // so the field was never initialized or written.)
    this.lastResponse = null;
    // Alternating turns as { author: 0|1, prompt: string } (0 = AI, 1 = human).
    this.conversation = [];
  }
  /**
   * Add a human message to the conversation.
   *
   * @param {string} message What the human said
   */
  human(message) {
    // Bound the context window: once REMEMBER turns are stored, drop the
    // oldest human/AI pair before appending, keeping the prompt short.
    if (this.conversation.length >= REMEMBER) {
      this.conversation.shift();
      this.conversation.shift();
    }
    this.conversation.push({
      author: 1, // 1 = human
      prompt: message
    });
    console.log(AI_MODIFIER.otherName + ":", message);
  }
  /**
   * Add an AI message to the conversation and remember it as the latest reply.
   *
   * @param {string} message What the AI said
   */
  ai(message) {
    this.conversation.push({
      author: 0, // 0 = AI
      prompt: message
    });
    // Record the latest reply so callers can read `lastResponse`
    // (previously this field existed but was never assigned).
    this.lastResponse = message;
    console.log(AI_MODIFIER.name + ":", message);
  }
  /**
   * Generate the prompt text that OpenAI receives: the persona preamble,
   * each stored turn prefixed by its speaker label, and a trailing open
   * "AI:" line for the model to complete.
   *
   * @returns {string} The rendered prompt
   */
  getConversation() {
    let conv = AI_MODIFIER.start;
    for (const message of this.conversation) {
      conv += `\n${message.author ? AI_MODIFIER.otherName : AI_MODIFIER.name}: ${message.prompt}`;
    }
    conv += `\n${AI_MODIFIER.name}:`;
    return conv;
  }
  /**
   * Call the GPT-3 completion API with the rendered conversation.
   *
   * @returns {Promise<Object>} The raw response payload from openai-api
   */
  async completion() {
    const gptResponse = await openai.complete({
      engine: 'davinci',
      prompt: this.getConversation(),
      maxTokens: AI_MODIFIER.maxTokens,
      temperature: AI_MODIFIER.temperature,
      topP: AI_MODIFIER.topP,
      presencePenalty: AI_MODIFIER.presencePenalty,
      frequencyPenalty: AI_MODIFIER.frequencyPenalty,
      bestOf: 1,
      n: 1,
      stream: false,
      // Stop at a newline or whenever either speaker label reappears, so
      // the model does not continue the dialogue by itself.
      stop: ["\n", AI_MODIFIER.otherName + ":", AI_MODIFIER.name + ":"]
    });
    return gptResponse.data;
  }
}
module.exports = AI;