import streamlit as st
from transformers import pipeline

# Title and description
st.title("📘 Quiz Generator from Text")
st.write("Enter a paragraph, and this app will generate quiz questions using a Hugging Face model.")

# Load the model only once per session: st.cache_resource keeps the pipeline
# in memory across Streamlit reruns instead of reloading it on every interaction.
@st.cache_resource
def load_model():
    # "e2e-qg" is not a built-in transformers task, so we use the generic
    # text2text-generation pipeline with the T5-based question-generation model.
    return pipeline("text2text-generation", model="valhalla/t5-base-e2e-qg")

qg_pipeline = load_model()

# Text input
text = st.text_area("Enter your paragraph here:", height=200)

if st.button("Generate Quiz"):
    if not text.strip():
        st.warning("Please enter some text first.")
    else:
        with st.spinner("Generating questions..."):
            try:
                # The model expects a "generate questions:" prefix and returns the
                # generated questions joined by the "<sep>" token. max_length leaves
                # room for several questions in a single generation.
                output = qg_pipeline("generate questions: " + text, max_length=256)
                generated = output[0]["generated_text"]
                questions = [q.strip() for q in generated.split("<sep>") if q.strip()]

                # Note: this end-to-end question-generation model produces questions
                # only; it does not return answers or multiple-choice options.
                st.success("Here are your questions:")
                for idx, question in enumerate(questions):
                    st.markdown(f"**Q{idx + 1}. {question}**")
                    st.markdown("---")
            except Exception as e:
                st.error(f"Error: {e}")
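
# ---------------------------------------------------------------------------
# Running the app (assumed environment; adjust to your setup, and replace
# app.py with this file's actual name):
#
#   pip install streamlit transformers torch sentencepiece
#   streamlit run app.py
#
# Optional sanity check of the model outside Streamlit. This is a hypothetical
# snippet (not part of the app) that assumes the same "generate questions:"
# prefix and "<sep>" separator used above; run it in a plain Python shell,
# not via `streamlit run`:
#
#   from transformers import pipeline
#   qg = pipeline("text2text-generation", model="valhalla/t5-base-e2e-qg")
#   raw = qg(
#       "generate questions: Python was created by Guido van Rossum "
#       "and first released in 1991.",
#       max_length=256,
#   )[0]["generated_text"]
#   print([q.strip() for q in raw.split("<sep>") if q.strip()])
# ---------------------------------------------------------------------------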