Adding the "prompt" alternative
ProducerMatt committed Jun 19, 2023
1 parent 381f9fe commit 18ac706
Showing 11 changed files with 43 additions and 20 deletions.
2 changes: 1 addition & 1 deletion modules/AlignmentNewsletterSearch.py
@@ -147,7 +147,7 @@ def process_message(self, message: ServiceMessage) -> Response:
 
             query = match.group("query")
             return Response(
-                confidence=9, callback=self.process_search_request, args=[query]
+                confidence=9, callback=self.process_search_request, prompt=query
             )
 
     async def process_search_request(self, query) -> Response:
2 changes: 1 addition & 1 deletion modules/chatgpt.py
@@ -67,7 +67,7 @@ def process_message(self, message: ServiceMessage) -> Response:
             return Response()
 
         return Response(
-            confidence=3, callback=self.chatgpt_chat, args=[message], kwargs={}
+            confidence=3, callback=self.chatgpt_chat, prompt=message,
         )
 
     def process_message_from_stampy(self, message) -> None:
6 changes: 3 additions & 3 deletions modules/duckduckgo.py
@@ -23,21 +23,21 @@ def process_message(self, message: ServiceMessage) -> Response:
                 return Response(
                     confidence=10,
                     callback=self.ask,
-                    args=[text[m.end(0):]],
+                    prompt=text[m.end(0):],
                     why="This is definitely a web search",
                 )
             print(f"Text didn't match: {text}")
             if text.endswith("?"):
                 return Response(
                     confidence=6,
                     callback=self.ask,
-                    args=[text],
+                    prompt=text,
                     why="It's a question, we might be able to answer it",
                 )
             return Response(
                 confidence=2,
                 callback=self.ask,
-                args=[text],
+                prompt=text,
                 why="It's not a question but we might be able to look it up",
             )
         return Response()
2 changes: 1 addition & 1 deletion modules/gpt3module.py
@@ -75,7 +75,7 @@ def process_message(self, message: ServiceMessage) -> Response:
             return Response()
 
         return Response(
-            confidence=2, callback=self.gpt3_chat, args=[message], kwargs={}
+            confidence=2, callback=self.gpt3_chat, prompt=message,
         )
 
     def process_message_from_stampy(self, message: ServiceMessage) -> None:
9 changes: 9 additions & 0 deletions modules/module.py
@@ -90,6 +90,7 @@ class Response:
     confidence: float = 0.0
     text: Union[str, Iterable[str]] = ""
     callback: Optional[Callable] = None
+    prompt: Optional[Union[ServiceMessage, str]] = None
     args: list = field(default_factory=list)
     kwargs: dict = field(default_factory=dict)
 
@@ -109,6 +110,13 @@ def __repr__(self) -> str:
         kwargs = self.kwargs
         module = str(self.module)
         why = self.why
+        if self.prompt:
+            if isinstance(self.prompt, ServiceMessage):
+                prompt = self.prompt.id
+            else:
+                prompt = str(self.prompt)[:20] + "..." if len(self.prompt) > 20 else self.prompt  # limit length
+        else:
+            prompt = None
         return (
             "Response("
             + (f"{embed=} " if embed else "")
@@ -119,6 +127,7 @@ def __repr__(self) -> str:
             + (f"{kwargs=} " if kwargs else "")
             + (f"{module=} " if module else "")
             + (f"{why=}" if why else "")
+            + (f"{prompt=}" if prompt else "")
             + ")"
         )
 
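For orientation, a minimal sketch of how a module is expected to use the new field (EchoModule and its reply callback are hypothetical, not part of this commit; the import assumes this repository's modules/module.py, and message.content assumes a ServiceMessage-style object). Instead of packing the user's text into args, the module hands it over as prompt; the __repr__ change above then logs a ServiceMessage prompt by its id and truncates a string prompt to 20 characters.

    # Hypothetical example module (not part of this commit), assuming the
    # Module/Response classes from modules/module.py.
    from modules.module import Module, Response


    class EchoModule(Module):
        def process_message(self, message) -> Response:
            # Previously this would have been args=[message.content]; the
            # prompt now travels in its own field.
            return Response(
                confidence=2,
                callback=self.reply,
                prompt=message.content,
                why="Low-confidence fallback that just echoes the prompt",
            )

        async def reply(self, prompt: str) -> Response:
            # The service module passes response.prompt as the single
            # positional argument when args/kwargs are empty.
            return Response(confidence=10, text=f"You said: {prompt}")
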
2 changes: 1 addition & 1 deletion modules/testModule.py
@@ -122,7 +122,7 @@ def process_message(self, message: ServiceMessage):
         return Response(
             confidence=10,
             callback=self.run_integration_tests,
-            args=[message],
+            prompt=message,
             kwargs={"modules_dict": modules_dict},
         )
 
2 changes: 1 addition & 1 deletion modules/videosearch.py
@@ -135,7 +135,7 @@ def process_message(self, message):
         m = re.match(self.re_search, text)
         if m:
             query = m.group("query")
-            return Response(confidence=9, callback=self.process_search_request, args=[query])
+            return Response(confidence=9, callback=self.process_search_request, prompt=query)
 
         # This is either not at me, or not something we can handle
         return Response()
4 changes: 2 additions & 2 deletions modules/why.py
@@ -24,14 +24,14 @@ def process_message(self, message: ServiceMessage) -> Response:
                 return Response(
                     confidence=10,
                     callback=self.specific,
-                    args=[message],
+                    prompt=message,
                     why="A stamp owner wants to know why I said something.",
                 )
             else:
                 return Response(
                     confidence=10,
                     callback=self.general,
-                    args=[message],
+                    prompt=message,
                     why="A stamp owner wants to know why I said something.",
                 )
         else:
4 changes: 2 additions & 2 deletions modules/wolfram.py
@@ -35,14 +35,14 @@ def process_message(self, message):
             return Response(
                 confidence=5,
                 callback=self.ask,
-                args=[text],
+                prompt=text,
                 why="It's a question, we might be able to answer it",
             )
         else:
             return Response(
                 confidence=1,
                 callback=self.ask,
-                args=[text],
+                prompt=text,
                 why="It's not a question but we might be able to look it up",
             )
 
2 changes: 1 addition & 1 deletion runstampy
@@ -24,7 +24,7 @@ if [ "${STAMPY_RUN_TESTS:-nil}" != "nil" ]; then
     pylint stam;
 else
     while true; do
-        python stam.py
+        python -Werror stam.py
         EXIT_CODE=$?
         # echo "Return code: ${EXIT_CODE}" # DEBUG
         if [ "${STOP_ON_ERROR:-nil}" != "nil" ] && [ $EXIT_CODE != "42" ]; then
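For reference, a tiny illustration (not repository code) of what -Werror changes: warnings are raised as exceptions, so a deprecation surfacing anywhere in the bot now stops the run loop instead of scrolling past in the log.

    # demo.py -- run as `python -Werror demo.py` and the warning below is
    # raised as an exception; without the flag it is merely printed.
    import warnings

    warnings.warn("this code path is deprecated", DeprecationWarning)
    print("only reached without -Werror")
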
28 changes: 21 additions & 7 deletions servicemodules/discord.py
@@ -180,7 +180,7 @@ async def on_message(
             for response in responses:
                 args_string = ""
 
-                if response.callback:
+                if response.callback and response.args:
                     args_string = ", ".join([a.__repr__() for a in response.args])
                     if response.kwargs:
                         args_string += ", " + ", ".join(
@@ -189,6 +189,7 @@
                                for k, v in response.kwargs.items()
                            ]
                        )
+
                 log.info(
                     self.class_name,
                     response_module=str(response.module),
@@ -197,6 +198,9 @@
                     response_callback=(
                         response.callback.__name__ if response.callback else None
                     ),
+                    response_prompt=(
+                        response.prompt if response.prompt else None
+                    ),
                     response_args=args_string,
                     response_text=(
                         response.text
@@ -222,13 +226,23 @@ async def on_message(
             # Note that sometimes a callback will run but not send a message, in which case he'll seem to be typing but not say anything. I think this will be rare though.
             async with message.channel._channel.typing():
                 if inspect.iscoroutinefunction(top_response.callback):
-                    new_response = await top_response.callback(
-                        *top_response.args, **top_response.kwargs
-                    )
+                    if top_response.args:
+                        new_response = await top_response.callback(
+                            *top_response.args, **top_response.kwargs
+                        )
+                    else:
+                        new_response = await top_response.callback(
+                            top_response.prompt
+                        )
                 else:
-                    new_response = top_response.callback(
-                        *top_response.args, **top_response.kwargs
-                    )
+                    if top_response.args or top_response.kwargs:
+                        new_response = top_response.callback(
+                            *top_response.args, **top_response.kwargs
+                        )
+                    else:
+                        new_response = top_response.callback(
+                            top_response.prompt
+                        )
 
                 new_response.module = top_response.module
                 new_response.text = limit_text_and_notify(
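A self-contained sketch of the calling convention this hunk introduces (FakeResponse, dispatch, ask and main are hypothetical stand-ins, not repository code): when a response carries no args or kwargs, the callback receives response.prompt as its single positional argument, so a one-argument callback behaves the same under either style.

    import asyncio
    from dataclasses import dataclass, field
    from typing import Callable, Optional


    @dataclass
    class FakeResponse:  # simplified stand-in for modules.module.Response
        callback: Optional[Callable] = None
        prompt: Optional[str] = None
        args: list = field(default_factory=list)
        kwargs: dict = field(default_factory=dict)


    async def dispatch(response: FakeResponse):
        # Mirrors the branching above: explicit args/kwargs win, otherwise
        # the prompt is handed over as the only positional argument.
        if response.args or response.kwargs:
            return await response.callback(*response.args, **response.kwargs)
        return await response.callback(response.prompt)


    async def ask(query: str) -> str:
        return f"searching for {query!r}"


    async def main():
        old_style = FakeResponse(callback=ask, args=["instrumental convergence"])
        new_style = FakeResponse(callback=ask, prompt="instrumental convergence")
        assert await dispatch(old_style) == await dispatch(new_style)


    asyncio.run(main())

Note that in the diff above the coroutine branch falls back to prompt whenever args is empty, while the synchronous branch also checks kwargs; the sketch uses the broader check.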
