Update app.py
app.py CHANGED
@@ -27,8 +27,8 @@ def get_model():
 tokenizer, model = get_model()
 
 # Streamlit UI
-st.title("
-st.write("This chatbot provides
+st.title("LowCode Chatbot")
+st.write("This chatbot provides interaction with LLM. Type your question below!")
 
 if model is None or tokenizer is None:
     st.error("Model failed to load. Please check the Hugging Face model path or environment configuration.")
@@ -38,7 +38,7 @@ else:
     if st.button("Send"):
         if user_input.strip():
             # Construct the prompt
-            SYSTEM_PROMPT = "You are a helpful
+            SYSTEM_PROMPT = "You are a helpful assistant. Provide accurate and concise answers."
             full_prompt = f"{SYSTEM_PROMPT}\nUser: {user_input}\nAssistant:"
 
             # Tokenize the input
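
For context, below is a minimal sketch of how the changed lines might fit into the rest of app.py. The diff only shows fragments, so the model path (MODEL_NAME / "distilgpt2"), the caching and error handling inside get_model(), the "Your question:" text input, and the generate/decode step with max_new_tokens are assumptions for illustration, not the Space's actual code.

# Sketch only: reconstructs the surrounding app.py structure implied by the diff.
# Everything not visible in the diff (model path, get_model body, generation step)
# is an assumption.
import streamlit as st
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

MODEL_NAME = "distilgpt2"  # placeholder; the Space's real model path is not shown in the diff

@st.cache_resource
def get_model():
    # Load tokenizer and model once and cache them across reruns.
    try:
        tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
        model = AutoModelForCausalLM.from_pretrained(MODEL_NAME)
        return tokenizer, model
    except Exception:
        return None, None

tokenizer, model = get_model()

# Streamlit UI
st.title("LowCode Chatbot")
st.write("This chatbot provides interaction with LLM. Type your question below!")

if model is None or tokenizer is None:
    st.error("Model failed to load. Please check the Hugging Face model path or environment configuration.")
else:
    user_input = st.text_input("Your question:")  # assumed widget; not shown in the diff
    if st.button("Send"):
        if user_input.strip():
            # Construct the prompt
            SYSTEM_PROMPT = "You are a helpful assistant. Provide accurate and concise answers."
            full_prompt = f"{SYSTEM_PROMPT}\nUser: {user_input}\nAssistant:"

            # Tokenize the input and generate a reply (assumed continuation)
            inputs = tokenizer(full_prompt, return_tensors="pt")
            with torch.no_grad():
                output_ids = model.generate(**inputs, max_new_tokens=128)
            # Decode only the newly generated tokens after the prompt
            reply = tokenizer.decode(
                output_ids[0][inputs["input_ids"].shape[1]:],
                skip_special_tokens=True,
            )
            st.write(reply)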