import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer
import torch
# Load Model & Tokenizer
MODEL_NAME = "tezodipta/MindEase-Assistant-v0.1"
tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
# float16 halves memory use; device_map="auto" lets accelerate place the model on the available device(s)
model = AutoModelForCausalLM.from_pretrained(MODEL_NAME, torch_dtype=torch.float16, device_map="auto")
# Function to Generate Response
def generate_response(prompt):
    # Tokenize the prompt and move it to the model's device
    input_ids = tokenizer(prompt, return_tensors="pt").input_ids.to(model.device)
    # Sample up to 200 total tokens with nucleus sampling
    output = model.generate(input_ids, max_length=200, temperature=0.7, do_sample=True, top_p=0.9)
    return tokenizer.decode(output[0], skip_special_tokens=True)
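# Example call (hypothetical prompt): generate_response("I've been feeling anxious lately, what can I do?")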
# Gradio UI
interface = gr.Interface(
    fn=generate_response,
    inputs="text",
    outputs="text",
    title="MindEase AI Assistant",
    description="Chat with a Mental Health AI Assistant",
)
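# Serve on all interfaces at port 7860 (the port Hugging Face Spaces expects);
# share=True additionally requests a temporary public Gradio link where supported.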
interface.launch(server_name="0.0.0.0", server_port=7860, share=True)