# brian-challenge/app.py
import streamlit as st
from transformers import pipeline, PegasusForConditionalGeneration, PegasusTokenizer
import nltk
from fill_in_summary import FillInSummary
from paraphrase import PegasusParaphraser
import question_generator as q
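
# nltk.sent_tokenize relies on the 'punkt' tokenizer data; fetch it on first
# run if it is missing (assumes the default NLTK data directory is writable).
try:
    nltk.data.find('tokenizers/punkt')
except LookupError:
    nltk.download('punkt')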
# Question Generator Variables
ids = {'mt5-small': st.secrets['small'],
       'mt5-base': st.secrets['base']}
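# The two secrets are expected to hold the Drive file IDs of the checkpoints,
# e.g. in .streamlit/secrets.toml (or the Space's secrets settings):
#
#   small = "<drive-file-id-for-mt5-small>"
#   base = "<drive-file-id-for-mt5-base>"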
st.set_page_config(layout="centered")
st.title('Question Generator by Eddevs')
select = st.selectbox('Type', ['Question Generator', 'Paraphrasing', 'Summarization', 'Fill in the blank'])
if select == "Question Generator":
    with st.form("question_gen"):
        # left_column, right_column = st.columns(2)
        # left_column.selectbox('Type', ['Question Generator', 'Paraphrasing'])
        # st.selectbox('Model', ['T5', 'GPT Neo-X'])

        # Download all model checkpoints from drive
        q.download_models(ids)

        # Model selection
        model_path = st.selectbox('Model', options=list(ids), index=1, help='Model to use.')
        model = q.load_model(model_path=f"model/{model_path}.ckpt")
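        # Note: q.download_models and q.load_model are helpers from this repo's
        # question_generator module. The code assumes download_models has placed
        # the checkpoints referenced by the Drive IDs above under model/, which
        # is where load_model reads "<model_path>.ckpt" from.
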
        text_input = st.text_area("Input Text")
        split = st.checkbox('Split into sentences', value=True)
        submitted = st.form_submit_button("Generate")

        if submitted:
            if split:
                # Split the input into sentences and generate questions per sentence
                sent_tokenized = nltk.sent_tokenize(text_input)
                res = {}
                with st.spinner('Please wait while the inputs are being processed...'):
                    for sentence in sent_tokenized:
                        # model.multitask returns, per input, parallel lists of
                        # questions, extracted answers and generated answers
                        predictions = model.multitask([sentence], max_length=512)
                        questions = predictions['questions']
                        answers = predictions['answers']
                        answers_bis = predictions['answers_bis']

                        # Build a question -> answers dict for this sentence
                        content = {}
                        for question, answer, answer_bis in zip(questions[0], answers[0], answers_bis[0]):
                            content[question] = {'answer (extracted)': answer, 'answer (generated)': answer_bis}
                        res[sentence] = content

                # Answer area
                st.write(res)
            else:
                with st.spinner('Please wait while the inputs are being processed...'):
                    # Generate questions for the whole input at once
                    predictions = model.multitask([text_input], max_length=512)
                    questions = predictions['questions']
                    answers = predictions['answers']
                    answers_bis = predictions['answers_bis']

                    # Answer area
                    content = {}
                    for question, answer, answer_bis in zip(questions[0], answers[0], answers_bis[0]):
                        content[question] = {'answer (extracted)': answer, 'answer (generated)': answer_bis}
                    st.write(content)
elif select == "Summarization":
    with st.form("summarization"):
        text_input = st.text_area("Input Text")
        submitted = st.form_submit_button("Generate")

        if submitted:
            with st.spinner('Wait for it...'):
                result = FillInSummary().summarize(text_input)
                st.write(result)
elif select == "Fill in the blank":
    with st.form("fill_in_the_blank"):
        text_input = st.text_area("Input Text")
        submitted = st.form_submit_button("Generate")

        if submitted:
            with st.spinner('Wait for it...'):
                fill = FillInSummary()
                result = fill.summarize(text_input)
                result = fill.blank_ne_out(result)
                st.write(result)
elif select == "Paraphrasing":
    with st.form("paraphrasing"):
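        # Assumption: PegasusParaphraser (from this repo's paraphrase module)
        # forwards the slider values below to the underlying Pegasus generation
        # call, so Count sets how many paraphrases are returned
        # (num_return_sequences) and Temperature controls sampling diversity.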
        # st.selectbox('Model', ['T5', 'GPT Neo-X'])
        left_column, right_column = st.columns(2)
        count = left_column.slider('Count', 0, 10, 3)
        temperature = right_column.slider('Temperature', 0.0, 10.0, 1.5)

        text_input = st.text_area("Input Text")
        submitted = st.form_submit_button("Generate")

        if submitted:
            with st.spinner('Wait for it...'):
                paraphrase_model = PegasusParaphraser(num_return_sequences=count, temperature=temperature)
                result = paraphrase_model.paraphrase(text_input)
                st.write(result)