v35.02; stream chat output with func call enabled
eliranwong committed Oct 5, 2023
1 parent 292b83e commit 7ed06a2
Showing 6 changed files with 306 additions and 60 deletions.
2 changes: 1 addition & 1 deletion UniqueBibleAppVersion.txt
@@ -1 +1 @@
35.01
35.02
163 changes: 142 additions & 21 deletions gui/Worker.py
@@ -88,6 +88,133 @@ def __init__(self, parent):
self.parent = parent
self.threadpool = QThreadPool()

def fineTunePythonCode(self, code):
insert_string = "import config\nconfig.pythonFunctionResponse = "
code = re.sub("^!(.*?)$", r"import os\nos.system(\1)", code, flags=re.M)
if "\n" in code:
substrings = code.rsplit("\n", 1)
            lastLine = re.sub(r"print\((.*)\)", r"\1", substrings[-1])
code = code if lastLine.startswith(" ") else f"{substrings[0]}\n{insert_string}{lastLine}"
else:
code = f"{insert_string}{code}"
return code
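
For reference, a standalone sketch of what this transform does to hypothetical model-generated code; the function and sample snippets below are illustrative only and mirror the method above without self:

import re

# same logic as the method above, shown without self for demonstration
def fine_tune_python_code(code):
    insert_string = "import config\nconfig.pythonFunctionResponse = "
    # rewrite shell-style lines beginning with "!" into os.system(...) calls
    code = re.sub("^!(.*?)$", r"import os\nos.system(\1)", code, flags=re.M)
    if "\n" in code:
        head, last_line = code.rsplit("\n", 1)
        # unwrap print(...) so the value itself is captured
        last_line = re.sub(r"print\((.*)\)", r"\1", last_line)
        # leave indented last lines (e.g. inside a block) untouched
        code = code if last_line.startswith(" ") else f"{head}\n{insert_string}{last_line}"
    else:
        code = f"{insert_string}{code}"
    return code

print(fine_tune_python_code("1 + 1"))
# import config
# config.pythonFunctionResponse = 1 + 1

print(fine_tune_python_code("import datetime\nprint(datetime.date.today())"))
# import datetime
# import config
# config.pythonFunctionResponse = datetime.date.today()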

def getFunctionResponse(self, response_message, function_name):
if function_name == "python":
config.pythonFunctionResponse = ""
python_code = textwrap.dedent(response_message["function_call"]["arguments"])
refinedCode = self.fineTunePythonCode(python_code)

print("--------------------")
print(f"running python code ...")
if config.developer or config.codeDisplay:
print("```")
print(python_code)
print("```")
print("--------------------")

try:
exec(refinedCode, globals())
function_response = str(config.pythonFunctionResponse)
except:
function_response = python_code
info = {"information": function_response}
function_response = json.dumps(info)
else:
            function_to_call = config.chatGPTApiAvailableFunctions[function_name]
            function_args = json.loads(response_message["function_call"]["arguments"])
            function_response = function_to_call(function_args)
return function_response
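
The non-"python" branch dispatches to callables registered by plugins. A hedged sketch of that registration convention, inferred from how this method and runCompletion read config; the get_weather name and schema are made up, and it assumes the application has initialized config.chatGPTApiFunctionSignatures as a list and config.chatGPTApiAvailableFunctions as a dict:

import json
import config  # UniqueBible's config module; assumes it is already set up by the app

def get_weather(function_args):
    # function_args is the dict parsed from the model's JSON "arguments" string
    city = function_args.get("city")
    # a real plugin would call an external service here; return a string for the model
    return json.dumps({"city": city, "forecast": "sunny"})

# schema passed to the API via the functions= parameter in runCompletion below
config.chatGPTApiFunctionSignatures.append({
    "name": "get_weather",
    "description": "Get the weather forecast for a city",
    "parameters": {
        "type": "object",
        "properties": {
            "city": {"type": "string", "description": "the city to look up"},
        },
        "required": ["city"],
    },
})
# callable looked up by name in getFunctionResponse above
config.chatGPTApiAvailableFunctions["get_weather"] = get_weather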

def getStreamFunctionResponseMessage(self, completion, function_name):
function_arguments = ""
for event in completion:
delta = event["choices"][0]["delta"]
if delta and delta.get("function_call"):
function_arguments += delta["function_call"]["arguments"]
return {
"role": "assistant",
"content": None,
"function_call": {
"name": function_name,
"arguments": function_arguments,
}
}
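
A mocked illustration of the chunk shape this helper accumulates; the event dicts below are made up in the format the loop expects from the openai 0.x streaming API, while the real ones come from the completion object:

# mocked stream chunks (illustrative); only the "arguments" fragments are accumulated
mock_completion = [
    {"choices": [{"delta": {"role": "assistant", "function_call": {"name": "python", "arguments": ""}}}]},
    {"choices": [{"delta": {"function_call": {"arguments": "{\"code\": \"pri"}}}]},
    {"choices": [{"delta": {"function_call": {"arguments": "nt(1+1)\"}"}}}]},
    {"choices": [{"delta": {}}]},  # empty delta at the end of the stream is skipped
]

function_arguments = ""
for event in mock_completion:
    delta = event["choices"][0]["delta"]
    if delta and delta.get("function_call"):
        function_arguments += delta["function_call"]["arguments"]

print(function_arguments)  # {"code": "print(1+1)"}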

def runCompletion(self, thisMessage, progress_callback):
self.functionJustCalled = False
def runThisCompletion(thisThisMessage):
if config.chatGPTApiFunctionSignatures and not self.functionJustCalled:
return openai.ChatCompletion.create(
model=config.chatGPTApiModel,
messages=thisThisMessage,
n=1,
temperature=config.chatGPTApiTemperature,
max_tokens=config.chatGPTApiMaxTokens,
functions=config.chatGPTApiFunctionSignatures,
function_call=config.chatGPTApiFunctionCall,
stream=True,
)
return openai.ChatCompletion.create(
model=config.chatGPTApiModel,
messages=thisThisMessage,
n=1,
temperature=config.chatGPTApiTemperature,
max_tokens=config.chatGPTApiMaxTokens,
stream=True,
)

while True:
completion = runThisCompletion(thisMessage)
function_name = ""
try:
# consume the first delta
for event in completion:
delta = event["choices"][0]["delta"]
# Check if a function is called
if not delta.get("function_call"):
self.functionJustCalled = True
elif "name" in delta["function_call"]:
function_name = delta["function_call"]["name"]
                    # inspecting the first delta is enough to know whether a function is called
break
# Continue only when a function is called
if self.functionJustCalled:
break

# get stream function response message
response_message = self.getStreamFunctionResponseMessage(completion, function_name)

# get function response
function_response = self.getFunctionResponse(response_message, function_name)

# process function response
# send the info on the function call and function response to GPT
thisMessage.append(response_message) # extend conversation with assistant's reply
thisMessage.append(
{
"role": "function",
"name": function_name,
"content": function_response,
}
) # extend conversation with function response

self.functionJustCalled = True

if not config.chatAfterFunctionCalled:
progress_callback.emit("\n\n~~~ ")
progress_callback.emit(function_response)
return None
except:
self.showErrors()
break

return completion
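
In effect, each pass through the while loop extends the conversation with one assistant function_call message and one role-"function" result before calling the API again with functions disabled via functionJustCalled. A sketch of the two appended messages for the hypothetical get_weather function above, following the OpenAI function-calling message format:

thisMessage = [
    {"role": "user", "content": "What is the weather in Tokyo?"},
    # reconstructed from the stream by getStreamFunctionResponseMessage
    {
        "role": "assistant",
        "content": None,
        "function_call": {"name": "get_weather", "arguments": "{\"city\": \"Tokyo\"}"},
    },
    # result returned by getFunctionResponse, fed back so the next completion can answer in prose
    {
        "role": "function",
        "name": "get_weather",
        "content": "{\"city\": \"Tokyo\", \"forecast\": \"sunny\"}",
    },
]

When config.chatAfterFunctionCalled is False, the loop instead emits the function result directly through progress_callback and returns None, so getResponse skips the follow-up streaming pass.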

def showErrors(self):
if config.developer:
print(traceback.format_exc())

def getResponse(self, messages, progress_callback, functionJustCalled=False):
responses = ""
if config.chatGPTApiLoadingInternetSearches == "always" and not functionJustCalled:
@@ -118,27 +245,21 @@ def getResponse(self, messages, progress_callback, functionJustCalled=False):
except:
print("Unable to load internet resources.")
try:
if config.chatGPTApiNoOfChoices == 1 and (config.chatGPTApiFunctionCall == "none" or not config.chatGPTApiFunctionSignatures or functionJustCalled):
completion = openai.ChatCompletion.create(
model=config.chatGPTApiModel,
messages=messages,
max_tokens=config.chatGPTApiMaxTokens,
temperature=config.chatGPTApiTemperature,
n=config.chatGPTApiNoOfChoices,
stream=True,
)
progress_callback.emit("\n\n~~~ ")
for event in completion:
# stop generating response
stop_file = ".stop_chatgpt"
if os.path.isfile(stop_file):
os.remove(stop_file)
break
# RETRIEVE THE TEXT FROM THE RESPONSE
event_text = event["choices"][0]["delta"] # EVENT DELTA RESPONSE
progress = event_text.get("content", "") # RETRIEVE CONTENT
# STREAM THE ANSWER
progress_callback.emit(progress)
if config.chatGPTApiNoOfChoices == 1:
completion = self.runCompletion(messages, progress_callback)
if completion is not None:
progress_callback.emit("\n\n~~~ ")
for event in completion:
# stop generating response
stop_file = ".stop_chatgpt"
if os.path.isfile(stop_file):
os.remove(stop_file)
break
# RETRIEVE THE TEXT FROM THE RESPONSE
event_text = event["choices"][0]["delta"] # EVENT DELTA RESPONSE
progress = event_text.get("content", "") # RETRIEVE CONTENT
# STREAM THE ANSWER
progress_callback.emit(progress)
else:
if config.chatGPTApiFunctionSignatures:
completion = openai.ChatCompletion.create(
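
The streamed branch above can be interrupted by dropping a sentinel file. A minimal sketch of the other side of that handshake, assuming the worker's current working directory; the loop above deletes the file and stops at the next chunk:

import os

def stop_streaming(stop_file=".stop_chatgpt"):
    # create the sentinel that the streaming loop in getResponse checks for
    with open(stop_file, "w") as fileObj:
        fileObj.write("")

Writing the file from the GUI thread, or from a terminal with touch .stop_chatgpt, takes effect on the next streamed chunk.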
4 changes: 4 additions & 0 deletions latest_changes.txt
@@ -1,3 +1,7 @@
Changes in 35.02:
Improved plugin "Bible Chat":
* Support streaming output with function calling enabled

Changes in 35.01:
Improved plugin "Bible Chat":
* Added previously entered commands to auto completion
12 changes: 6 additions & 6 deletions patches.txt
@@ -1385,11 +1385,11 @@
(34.97, "file", "gui/MainWindow.py")
(34.97, "file", "util/ConfigUtil.py")
(34.98, "file", "plugins/chatGPT/integrate google searches.py")
(35.00, "file", "gui/Worker.py")
(35.00, "file", "plugins/chatGPT/execute python code.py")
(35.00, "file", "util/LocalCliHandler.py")
(35.01, "file", "plugins/chatGPT/000_UBA.py")
(35.01, "file", "plugins/menu/Bible Chat.py")
(35.01, "file", "patches.txt")
(35.01, "file", "UniqueBibleAppVersion.txt")
(35.01, "file", "latest_changes.txt")
(35.02, "file", "gui/Worker.py")
(35.02, "file", "plugins/chatGPT/execute python code.py")
(35.02, "file", "util/LocalCliHandler.py")
(35.02, "file", "patches.txt")
(35.02, "file", "UniqueBibleAppVersion.txt")
(35.02, "file", "latest_changes.txt")
19 changes: 12 additions & 7 deletions plugins/chatGPT/execute python code.py
@@ -21,16 +21,21 @@
# Open VLC player.

def run_python(function_args):
def fineTunePythonCode(code):
insert_string = "import config\nconfig.pythonFunctionResponse = "
code = re.sub("^!(.*?)$", r"import os\nos.system(\1)", code, flags=re.M)
if "\n" in code:
substrings = code.rsplit("\n", 1)
            lastLine = re.sub(r"print\((.*)\)", r"\1", substrings[-1])
code = code if lastLine.startswith(" ") else f"{substrings[0]}\n{insert_string}{lastLine}"
else:
code = f"{insert_string}{code}"
return code

# retrieve argument values from a dictionary
#print(function_args)
function_args = function_args.get("code") # required

insert_string = "import config\nconfig.pythonFunctionResponse = "
if "\n" in function_args:
substrings = function_args.rsplit("\n", 1)
new_function_args = f"{substrings[0]}\n{insert_string}{substrings[-1]}"
else:
new_function_args = f"{insert_string}{function_args}"
new_function_args = fineTunePythonCode(function_args)
try:
exec(new_function_args, globals())
function_response = str(config.pythonFunctionResponse)
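
A hypothetical invocation of run_python, mirroring how gui/Worker.py dispatches registered functions; the arguments string stands in for the model's JSON output, and the snippet assumes the plugin's own imports (re, config) from the truncated top of the file:

import json

# arguments as the model would return them in a function_call (hypothetical example)
arguments = "{\"code\": \"import datetime\\nprint(datetime.date.today())\"}"
function_args = json.loads(arguments)
response = run_python(function_args)  # a string built from config.pythonFunctionResponse, e.g. '{"information": "2023-10-05"}'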