Skip to content

Commit

Permalink
dc endpoint and slash commands support
Browse files Browse the repository at this point in the history
  • Loading branch information
sand194 committed Apr 16, 2024
1 parent 0ab4240 commit fbbee11
Show file tree
Hide file tree
Showing 3 changed files with 598 additions and 381 deletions.
40 changes: 38 additions & 2 deletions discord_bot/discord_bot/bot.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,8 @@
import os
from collections import defaultdict
from typing import Self
import requests
import json

import discord
from discord.ext import commands
Expand Down Expand Up @@ -40,8 +42,29 @@ async def bad_button(
)


@bot.command()
@commands.has_any_role("Admins", "Moderators")
async def sync(ctx) -> None:
    """Push the application (slash) command tree to Discord.

    Restricted to the Admins/Moderators roles; acknowledges in-channel
    before performing the (potentially slow) sync call.
    """
    await ctx.send("Synchronizing commands...")
    tree = bot.tree
    await tree.sync()


def query_llm(prompt, stop_signs, timeout=60):
    """Send a completion request to the local LLM service.

    Args:
        prompt: Text forwarded verbatim to the completions endpoint.
        stop_signs: List of stop sequences at which generation halts.
        timeout: Seconds to wait for the HTTP response before
            ``requests.Timeout`` is raised. Defaults to 60, so existing
            callers keep working; previously no timeout was set and the
            bot could hang forever if the service was unreachable.

    Returns:
        The parsed JSON body (``dict``) on HTTP 200; otherwise the raw
        response text (``str``) so the caller can surface the error.
    """
    url = "http://llm:9000/v1/completions"
    data = {"prompt": prompt, "stop": stop_signs}

    # ``json=`` serializes the payload and sets the Content-Type header
    # in one step, replacing the manual json.dumps + headers dict.
    response = requests.post(url, json=data, timeout=timeout)

    if response.status_code == 200:
        return response.json()
    return response.text


async def get_chats_history():
"""Taking chat conversation from all chanells."""
"""Taking chat conversation from all channels."""
chats_history = defaultdict(list)
for guild in bot.guilds:
readable_channels = filter(
Expand All @@ -58,7 +81,7 @@ async def get_chats_history():

@bot.command()
async def show(ctx: commands.Context, limit: int = 100):
"""Shows what get_chats_history gets."""
"""Shows the results of get_chats_history"""
last_messages = await get_chats_history()
channel_id = ctx.channel.id
if last_messages[channel_id]:
Expand All @@ -68,6 +91,19 @@ async def show(ctx: commands.Context, limit: int = 100):
await ctx.send("Brak ostatnich wiadomości.")


@bot.tree.command(name="chatknml", description="Porozmawiaj z chatbotem")
async def chatknml(interaction: discord.Interaction, *, prompt: str):
    """Pass the user's prompt to the LLM and reply with its completion.

    Defers the interaction first, because the LLM request can easily
    exceed Discord's 3-second initial-response window.
    """
    await interaction.response.defer()

    # Instruction-style prompt template expected by the backing model.
    query = "\n\n### Instructions:\n" + prompt + "\n\n### Response:\n"
    stop_signs = ["\n", "###"]

    # NOTE(review): query_llm is a blocking requests call; consider
    # asyncio.to_thread so it does not stall the event loop.
    result = query_llm(query, stop_signs)

    # query_llm returns the raw response text (a str) on HTTP errors;
    # indexing a str like a dict would raise TypeError, so report the
    # error to the user instead of crashing the command.
    if isinstance(result, dict):
        await interaction.followup.send(result["choices"][0]["text"])
    else:
        await interaction.followup.send(f"LLM error: {result}")


def main():
"""Entrypoint."""
bot.run(os.environ["TOKEN"])
Expand Down
Loading

0 comments on commit fbbee11

Please sign in to comment.