import gradio as gr
from huggingface_hub import InferenceClient
from PyPDF2 import PdfReader
import os
# Pre-load the text of the reference PDFs
def extract_pdf_text(pdf_paths):
    full_text = ""
    for path in pdf_paths:
        reader = PdfReader(path)
        for page in reader.pages:
            text = page.extract_text()
            if text:  # extract_text() can return None/empty for image-only pages
                full_text += text + "\n"
    return full_text.strip()
# Pre-specified PDF documents
pdf_context = extract_pdf_text([
    "assets/Programming-Fundamentals-1570222270.pdf",
    "assets/1분파이썬_강의자료_전체.pdf"
])
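
# Sketch (assumption): the combined PDF text can easily exceed the model's context
# window; this rough character cap is an illustrative safeguard, tune or remove it as needed.
MAX_CONTEXT_CHARS = 12000
pdf_context = pdf_context[:MAX_CONTEXT_CHARS]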
# Set up the Hugging Face Inference Client (Mistral-7B-Instruct)
client = InferenceClient(
    model="mistralai/Mistral-7B-Instruct-v0.1",
    token=os.getenv("HUGGINGFACEHUB_API_TOKEN")  # must be set, e.g. as a Space secret
)
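
# Sketch (assumption): warn early if the token secret is missing, since requests
# to the Inference API would then fail at chat time.
if not os.getenv("HUGGINGFACEHUB_API_TOKEN"):
    print("Warning: HUGGINGFACEHUB_API_TOKEN is not set; Inference API calls will fail.")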
def respond(message, history, system_message, max_tokens, temperature, top_p):
    # Join the conversation history into plain text
    # (history arrives as (user, assistant) pairs from gr.ChatInterface)
    history_text = "\n".join(
        [f"Q: {user}\nA: {assistant}" for user, assistant in history if user and assistant]
    )
    # Build a single text prompt; text_generation() takes raw text, not a messages list
    prompt = f"{system_message}\n\nDocument summary:\n{pdf_context}\n\n{history_text}\n\nQ: {message}\nA:"
    result = client.text_generation(
        prompt=prompt,
        max_new_tokens=max_tokens,
        temperature=temperature,
        top_p=top_p
    )
    return result.strip()
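
# Alternative sketch (assumption, not wired in above): Mistral-7B-Instruct is a chat-tuned
# model, so client.chat_completion(messages=[{"role": "system", ...}, {"role": "user", ...}])
# may follow instructions better than a raw text_generation() prompt.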
# Build the Gradio interface
demo = gr.ChatInterface(
    fn=respond,
    additional_inputs=[
        gr.Textbox(value="You are a friendly teaching assistant who understands programming documents and explains them with code examples.", label="System message"),
        gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
        gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
        gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p"),
    ],
    title="💻 Python API-based Coding Assistant",
    description="A Mistral-7B-Instruct chatbot that answers coding questions based on the Python lecture materials."
)
if __name__ == "__main__":
    demo.launch()