-
Notifications
You must be signed in to change notification settings - Fork 0
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Feature implementation: Add support for multiple PDF files
- Loading branch information
1 parent
2e01038
commit d891b91
Showing
3 changed files
with
66 additions
and
26 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,29 +1,50 @@ | ||
""" | ||
Filename: main.py | ||
Description: Implements endpoints for NekoPDF frontend to interact with | ||
Run: uvicorn main:app --reload | ||
""" | ||
from typing import List | ||
from fastapi import FastAPI | ||
from pydantic import BaseModel | ||
from llama_chat import get_llama_embeddings, get_llama_answers | ||
from typing import List | ||
from openai_chat import get_openai_answers, get_openai_embeddings | ||
|
||
app = FastAPI() | ||
|
||
class QA(BaseModel):
    """Request body accepted by the /qa/* endpoints.

    Attributes:
        chunks: Chunks of text to embed. NOTE(review): element type is not
            constrained here — presumably strings; confirm against the caller.
        store_name: Name of the vectorstore to load from or save to.
        query: Query to run against the vectorstore.
        k: Number of top-K results for the similarity search.
    """
    # PEP 8: no space before the colon in annotations (original mixed
    # "chunks : List" and "k: int" styles).
    chunks: List
    store_name: str
    query: str
    k: int
|
||
@app.post('/qa/openai')
def openai_response(inputs: QA):
    """
    Parameters:
        inputs: Instance of QA class used to accept inputs for generating response
    Returns: Response generated by OpenAI, or None when no query was supplied
    """
    vectorstore = get_openai_embeddings(inputs.chunks, inputs.store_name)
    # BUG FIX: the committed diff renamed the parameter to `inputs` but left
    # `if input.query:` behind — `input` is the builtin function, so every
    # request raised AttributeError. Also guard against an empty query, which
    # previously hit an UnboundLocalError on `response`.
    if not inputs.query:
        return None
    return get_openai_answers(vectorstore, inputs.query, inputs.k)
|
||
@app.post('/qa/llama')
def llama_response(inputs: QA):
    """
    Parameters:
        inputs: Instance of QA class used to accept inputs for generating response
    Returns: Response generated by Llama, or None when no query was supplied
    """
    vectorstore = get_llama_embeddings(inputs.chunks, inputs.store_name)
    # BUG FIX: `if input.query:` referenced the builtin `input` after the
    # parameter rename to `inputs` — AttributeError at runtime. An empty query
    # also left `response` unbound; return None explicitly instead.
    if not inputs.query:
        return None
    return get_llama_answers(vectorstore, inputs.query, inputs.k)
|
||
|
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters