Added token limits to OpenAI chat.

Niels Simenon 2023-04-09 01:44:22 +02:00
parent 058ca10011
commit 21a90a51cf
5 changed files with 938 additions and 39 deletions

View File

@@ -70,6 +70,10 @@ module.exports = {
'gpt-4',
],
model: 'gpt-3.5-turbo',
replyTokenLimit: 200,
userTokenLimit: 20000, // daily, roughly 100+ messages or $0.04 per user
userTokenPeriod: 24, // hours
temperature: 1,
history: 3,
rule: 'a tired game host',
rulePublic: true,
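
The comment on userTokenLimit works out with some quick arithmetic. A rough sketch in plain Node, assuming gpt-3.5-turbo pricing of roughly $0.002 per 1K tokens at the time of this commit (the price is an assumption, not part of the diff):

// Budget math behind the userTokenLimit comment above (illustrative only).
const userTokenLimit = 20000;   // tokens per user per period
const replyTokenLimit = 200;    // max tokens per completion
const pricePer1kTokens = 0.002; // assumed gpt-3.5-turbo price in USD, early 2023

const approxReplies = Math.floor(userTokenLimit / replyTokenLimit); // 100 fully capped replies
const approxCostUsd = (userTokenLimit / 1000) * pricePer1kTokens;   // 0.04 USD per user per period

// Prompt and history tokens count toward the total too, so real message counts vary.
console.log(`~${approxReplies}+ messages or ~$${approxCostUsd.toFixed(2)} per user per day`);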

package-lock.json (generated, 886 changed lines)

File diff suppressed because it is too large

View File

@@ -18,6 +18,7 @@
"author": "Niels Simenon",
"license": "ISC",
"dependencies": {
"better-sqlite3": "^8.3.0",
"bhttp": "^1.2.8",
"bottleneck": "^2.19.5",
"config": "^3.3.6",
@@ -28,6 +29,7 @@
"irc-colors": "^1.5.0",
"irc-upd": "^0.11.0",
"jsdom": "^18.1.0",
"knex": "^2.4.2",
"linkify-it": "^3.0.3",
"markov-strings": "^3.0.1",
"simple-node-logger": "^21.8.12",

View File

@@ -4,12 +4,40 @@ const config = require('config');
const bhttp = require('bhttp');
const style = require('../utils/style');
const knex = require('../knex');
const promptRegex = new RegExp(`^${config.usernamePrefix ? `${config.usernamePrefix}?` : ''}${config.user.username}[:,\\s]`, 'ui');
const history = new Map();
const settings = config.chat;
async function onStart(context) {
const totalExists = await knex.schema.hasTable('chat_tokens');
if (!totalExists) {
await knex.schema.createTable('chat_tokens', (table) => {
table.increments('id');
table.string('user_id')
.notNullable();
table.integer('tokens')
.notNullable();
table.timestamp('created')
.notNullable();
});
context.logger.info('Created database table \'chat_tokens\'');
}
const purgeResult = await knex('chat_tokens')
.where('created', '<=', knex.raw(`datetime('now', '-${config.chat.userTokenPeriod} hour')`))
.delete();
context.logger.info(`Purged ${purgeResult} expired chat token totals from database`);
}
function setHistory(value, context) {
if (!value) {
context.sendMessage(`Chat history is set to ${style.bold(settings.history)}`, context.room.id, { label: false });
@@ -18,7 +46,7 @@ function setHistory(value, context) {
const newHistory = Number(value);
if (!Number.isNaN(newHistory)) {
if (!Number.isNaN(newHistory) && newHistory > 0 && newHistory <= 10) {
settings.history = newHistory;
context.logger.info(`Chat history set to ${newHistory} by ${context.user.username}`);
@@ -27,7 +55,27 @@ function setHistory(value, context) {
return;
}
context.logger.info(`Chat history must be a valid number, ${context.user.prefixedUsername}`);
context.logger.info(`Chat history must be a valid number between 0 and 10, ${context.user.prefixedUsername}`);
}
function setTemperature(value, context) {
if (!value) {
context.sendMessage(`Chat temperature is set to ${style.bold(settings.temperature)}`, context.room.id, { label: false });
return;
}
const newTemperature = Number(value);
if (!Number.isNaN(newTemperature) && newTemperature > 0 && newTemperature <= 2) {
settings.temperature = newTemperature;
context.logger.info(`Chat temperature set to ${newTemperature} by ${context.user.username}`);
context.sendMessage(`Chat temperature set to ${style.bold(newTemperature)} by ${context.user.prefixedUsername}`, context.room.id, { label: false });
return;
}
context.logger.info(`Chat temperature must be a valid number between 0 and 2, ${context.user.prefixedUsername}`);
}
function setModel(model, context) {
@@ -86,6 +134,11 @@ async function onCommand(args, context) {
return;
}
if (['temperature', 'temp'].includes(context.subcommand) && config.operators.includes(context.user.username)) {
setTemperature(Number(args[0]), context);
return;
}
if (['rule', 'is', 'reset'].includes(context.subcommand) && (config.chat.rulePublic || config.operators.includes(context.user.username))) {
setRule(context.subcommand === 'reset' ? 'reset' : args.join(' '), context);
return;
@@ -94,6 +147,19 @@ async function onCommand(args, context) {
const prompt = args.join(' ');
try {
const { used_tokens: usedTokens } = await knex('chat_tokens')
.sum('tokens as used_tokens')
.where('user_id', context.user.id)
.where('created', '>', knex.raw(`datetime('now', '-${config.chat.userTokenPeriod} hour')`)) // within the configured period, 24 hours by default
.first();
if (usedTokens >= config.chat.userTokenLimit) {
context.logger.info(`${context.user.username} was rate limited at ${usedTokens} used tokens: ${prompt}`);
context.sendMessage(`Sorry, I love talking with you ${context.user.prefixedUsername}, but I need to take a break :(`, context.room.id, { label: false });
return;
}
const message = {
role: 'user',
content: `Answer as if you're ${settings.rule}. ${prompt}`,
@@ -105,6 +171,7 @@ async function onCommand(args, context) {
const res = await bhttp.post('https://api.openai.com/v1/chat/completions', JSON.stringify({
model: settings.model,
max_tokens: config.chat.replyTokenLimit,
messages: userHistory,
}), {
headers: {
@@ -114,9 +181,16 @@
});
const reply = res.body.choices?.[0].message;
const curatedContent = reply.content.replace(/\n+/g, '. ');
context.logger.info(`OpenAI ${config.chat.model} replied to ${context.user.username} (${res.body.usage.total_tokens} tokens): ${reply.content}`);
context.sendMessage(`${context.user.prefixedUsername}: ${reply.content}`, context.room.id, { label: false });
context.logger.info(`OpenAI ${config.chat.model} replied to ${context.user.username} (${res.body.usage.total_tokens} tokens, ${(usedTokens || 0) + res.body.usage.total_tokens}/${config.chat.userTokenLimit} used): ${curatedContent}`);
context.sendMessage(`${context.user.prefixedUsername}: ${curatedContent}`, context.room.id, { label: false });
await knex('chat_tokens').insert({
user_id: context.user.id,
tokens: res.body.usage.total_tokens,
created: knex.raw("datetime('now')"),
});
if (config.chat.history > 0) {
history.set(context.user.username, userHistory.concat(reply).slice(-settings.history));
@@ -136,4 +210,5 @@ function onMessage(message, context) {
module.exports = {
onCommand,
onMessage,
onStart,
};
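
For reference, the per-user accounting in onCommand boils down to a single aggregate over chat_tokens. A sketch of that query pulled into a hypothetical helper (the helper itself is not part of this commit), which also spells out the null-on-empty behavior of sum():

// Hypothetical helper, not in this diff: tokens a user has spent within the
// configured period, matching the query used in onCommand above.
async function getUsedTokens(userId) {
  const { used_tokens: usedTokens } = await knex('chat_tokens')
    .sum('tokens as used_tokens')
    .where('user_id', userId)
    .where('created', '>', knex.raw(`datetime('now', '-${config.chat.userTokenPeriod} hour')`))
    .first();

  // sum() returns null when the user has no rows yet; treat that as zero usage
  return usedTokens || 0;
}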

View File

@@ -144,7 +144,7 @@ async function getGames(bot, identifier) {
};
if (game.onStart) {
game.onStart({ ...curatedGame, bot });
game.onStart({ ...curatedGame, bot, logger });
}
return {
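
Since the chat module's onStart is async (it creates the chat_tokens table on first run), one possible refinement of the hook call above is to await it and report failures through the logger that is now passed in. A sketch only, not part of this commit, and it assumes the surrounding code allows await here:

// Sketch: await the async onStart hook so a failed table setup is logged
// instead of surfacing as an unhandled promise rejection. The error message
// wording is illustrative.
if (game.onStart) {
  try {
    await game.onStart({ ...curatedGame, bot, logger });
  } catch (error) {
    logger.error(`onStart failed for '${identifier}': ${error.message}`);
  }
}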