# app.py
import streamlit as st
import pandas as pd
import numpy as np
import time
import json
import torch
from transformers import AutoModelForQuestionAnswering, AutoTokenizer, pipeline
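# The app exposes three NLP tasks behind a sidebar selector:
#   1. Summary                        - t5-small summarization pipeline
#   2. Question Answer (Theoretical)  - extractive QA over a COVID FAQ corpus
#   3. Question Answer (Statistical)  - TAPAS table QA over Covid.json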
@st.cache
def pr_task3():
    # Load the COVID statistics table once and keep only the first 80 rows.
    with open('Covid.json', encoding="UTF-8") as f:
        data1 = json.load(f)
    table = pd.DataFrame.from_dict(data1)
    table = table.head(80)
    return table
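# Table QA model: TAPAS fine-tuned on WikiSQL, queried against a pandas DataFrame.
# Loaded without st.cache, presumably because the pipeline object cannot be hashed reliably.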
def non_cachable_task3():
    model_name2 = 'google/tapas-medium-finetuned-wikisql-supervised'
    table_ans = pipeline('table-question-answering', model=model_name2, tokenizer=model_name2)
    return table_ans
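# Summarization model: plain t5-small checkpoint, loaded on demand when the user clicks Compute.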
def non_cachable_task1():
    model_name3 = 't5-small'
    nlp1 = pipeline('summarization', model=model_name3)
    return nlp1
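# FAQ corpus: every answer in faq_covidbert.csv is joined into one context string (cached).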
@st.cache
def pr_task2():
    # Concatenate all FAQ answers into a single context string for extractive QA.
    df = pd.read_csv('data/faqs/faq_covidbert.csv')
    sentences = df['answer']
    text_task2 = " ''' "
    for a in sentences:
        text_task2 += a
    text_task2 += " ''' "
    return text_task2
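# Extractive QA model: RoBERTa base fine-tuned on SQuAD 2.0-style COVID questions (deepset).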
def non_cachable_task2():
    model_name = "deepset/roberta-base-squad2-covid"
    nlp = pipeline('question-answering', model=model_name, tokenizer=model_name)
    return nlp
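# --- Streamlit UI ---
# The sidebar selectbox decides which of the three task branches below runs on each rerun.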
st.title('ELC PROJECT')
choice = ['Summary', 'Question Answer (Theoretical)', 'Question Answer (Statistical)']
option = st.sidebar.selectbox(
    'Select an NLP Task from the given list',
    choice,
)
if option == 'Summary':
    st.write('## Summary')
    user_input = st.text_area("Content to summarize", 'Enter your text here')
    values = st.sidebar.slider('Select the summary length range (min, max tokens)', 10, 100, (25, 75))
    st.sidebar.write('Min:', values[0])
    st.sidebar.write('Max:', values[1])
    if st.button('Compute'):
        nlp1 = non_cachable_task1()
        summarized_text = nlp1(user_input, min_length=values[0], max_length=values[1])
        st.write(summarized_text[0]['summary_text'])
        st.write("Length of summary: ", len(summarized_text[0]['summary_text'].split()))
elif option == 'Question Answer (Theoretical)':
    st.write('## Question Answer Theoretical')
    user_input = st.text_input("Ask a question", 'Enter question here')
    value = st.sidebar.slider('Select the number of closest answers to show', 1, 5, 1)
    st.sidebar.write("No of Answers to Print", value)
    text_ans = pr_task2()
    if st.button('Compute'):
        nlp = non_cachable_task2()
        QA_input = {
            'question': user_input,
            'context': text_ans,
        }
        # 'topk' matches older transformers releases; newer versions spell it 'top_k'.
        res = nlp(QA_input, topk=value)
        # With topk > 1 the pipeline returns a list of answers; with topk == 1 it returns a single dict.
        if value > 1:
            for x in range(len(res)):
                st.write('Answer ', x + 1, " :", res[x]['answer'])
                st.write('score : ', res[x]['score'])
        else:
            st.write('Answer ', 1, " :", res['answer'])
            st.write('score : ', res['score'])
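# Statistical QA: ask questions about the Covid.json table through the TAPAS pipeline.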
elif option == 'Question Answer (Statistical)':
    st.write('## Question Answer Statistical')
    user_input = st.text_input("Ask a question", 'Enter question here')
    table = pr_task3()
    if st.checkbox('Show data'):
        st.write(table)
    if st.button('Compute'):
        nlp = non_cachable_task3()
        res = nlp(table, user_input)
        if len(res['answer']) > 0:
            st.write('Answer :', res['answer'])
        else:
            st.write("Sorry, our model wasn't able to answer your question this time; please try another query.")
# latest_iteration = st.empty()
# bar = st.progress(0)
# for i in range(100):
# # Update the progress bar with each iteration.
# latest_iteration.text(f'Iteration {i+1}')
# bar.progress(i + 1)
# time.sleep(0)