Merge pull request #7 from harshkasat/Feature/GPT_API
Feature: Adding GPT_API
darshansharma authored Oct 11, 2023
2 parents 6ac8028 + 354232b commit d5af7ce
Showing 6 changed files with 97 additions and 0 deletions.
1 change: 1 addition & 0 deletions .env
@@ -0,0 +1 @@
API_KEY = "Your openai API_KEY"
Empty file added .gitignore
64 changes: 64 additions & 0 deletions GPT_API.py
@@ -0,0 +1,64 @@
import os

import openai
from dotenv import load_dotenv

load_dotenv()

# LangChain
from langchain.llms import OpenAI
from langchain.chains import LLMChain, SimpleSequentialChain
from langchain.prompts import PromptTemplate
from langchain.chains.conversation.memory import ConversationBufferMemory

API_KEY = os.getenv('API_KEY')
if not API_KEY:
    print("API_KEY is missing: add it to the .env file")
openai.api_key = API_KEY
# Plain OpenAI call using gpt-3.5-turbo
def get_message(usermessage):
    # ChatCompletion expects a list of {role, content} messages
    response = openai.ChatCompletion.create(
        model='gpt-3.5-turbo',
        messages=[{"role": "user", "content": usermessage}],
    )
    return response['choices'][0]['message']['content']

# OpenAI through LangChain

# Prompt template
def LLMPromptTemplate(Question):
    # Moderate temperature so the model answers a little more creatively
    llm = OpenAI(temperature=0.5)
    # Custom template that any user can edit
    template = """You are an amazing coder. Your duty is to write code and produce bug reports. Your job is to find bugs in code.
Question: {text}
Answer:
"""
    # The user's question is substituted into {text}
    prompt_template = PromptTemplate(input_variables=["text"], template=template)
    answer = LLMChain(llm=llm, prompt=prompt_template)
    return answer.run(Question)

def Chatbot(Question):
    llm = OpenAI(temperature=0.5)
    # Custom template; {chat_history} carries the running conversation
    template = """I'm BATMAN
{chat_history}
Human: {Question}
AI:
"""
    prompt_template = PromptTemplate(input_variables=["chat_history", "Question"], template=template)
    # Memory that stores the conversation history under "chat_history"
    memory = ConversationBufferMemory(memory_key="chat_history")
    answer = LLMChain(
        llm=llm,
        prompt=prompt_template,
        verbose=True,
        memory=memory,
    )
    # predict() takes the prompt variables as keyword arguments
    return answer.predict(Question=Question)
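
A minimal usage sketch for the three helpers above (not part of the diff): it assumes a valid `API_KEY` in `.env` and the `openai`/`langchain` packages from `requirements.txt`; the example questions are placeholders. Note that `Chatbot()` builds a fresh `ConversationBufferMemory` on every call, so history does not carry over between calls.

```python
# Hedged usage sketch; example prompts are illustrative only.
from GPT_API import get_message, LLMPromptTemplate, Chatbot

print(get_message("Write a haiku about Python."))           # plain gpt-3.5-turbo call
print(LLMPromptTemplate("Why does this loop never end?"))    # LangChain prompt-template chain
print(Chatbot("Who are you?"))                               # LangChain chain with buffer memory
```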
1 change: 1 addition & 0 deletions README.md
@@ -11,6 +11,7 @@ I was using Ubuntu 16.04 during the time I coded snorlax. Snorlax will work on M
Python 2.7 comes preinstalled on most Linux distros.
1. Install pip by command - `sudo apt-get install python-pip`
2. Use command - `pip install -r requirements.txt` to install additional packages.
3. For the GPT API key, create a `.env` file and paste ``API_KEY = "Your openai API_KEY"`` into it (a quick check is sketched below).
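
A quick, hedged check that the key in `.env` is actually picked up (assumes `python-dotenv` is installed; the message text is illustrative):

```python
# Minimal sketch: confirm the key from .env is visible to the app.
import os
from dotenv import load_dotenv

load_dotenv()
assert os.getenv("API_KEY"), "API_KEY not found; check your .env file"
print("API_KEY loaded")
```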

## To-Do List
1. Render the HTML on CLI using pandoc.
28 changes: 28 additions & 0 deletions main.py
@@ -4,6 +4,7 @@
import textwrap
import sys
import wikipedia
from GPT_API import get_message, LLMPromptTemplate, Chatbot


query = str(sys.argv[2])
@@ -23,3 +24,30 @@
print textwrap.fill(textwrap.dedent(p.get_text()).strip(), initial_indent='', subsequent_indent=' ')
'''
# This block is for OpenAI with LangChain
print("Do you want to use GPT (with or without LangChain)? Choose an option:")
print("1 Yes")
print("2 No")

try:
    choice = int(input("Enter an option: "))
    if choice == 1:
        print("Which GPT mode do you want?")
        print("1 Normal GPT")
        print("2 GPT with LangChain")
        print("3 GPT with LangChain chatbot")
        Gpt_choice = int(input("Enter an option: "))
        if Gpt_choice == 1:
            Question = input("What is your question? ")
            answer = get_message(Question)
            print(answer)
        elif Gpt_choice == 2:
            Question = input("What is your question? ")
            answer = LLMPromptTemplate(Question)
            print(answer)
        else:
            Question = input("What is your question? ")
            answer = Chatbot(Question)
            print(answer)
except Exception as e:
    print("The error is:", e)
3 changes: 3 additions & 0 deletions requirements.txt
@@ -1,3 +1,6 @@
bs4
requests
wikipedia
langchain
openai
python-dotenv
