import os

import gradio as gr
import torch
from huggingface_hub import login
from PIL import Image
from transformers import AutoModelForCausalLM, AutoProcessor

# Secure login using the HF token stored as a Space secret
hf_token = os.environ.get("HUGGINGFACE_API_KEY")
if hf_token:
    login(hf_token)
model_id = "ContactDoctor/Bio-Medical-MultiModal-Llama-3-8B-V1"
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    trust_remote_code=True,
    torch_dtype=torch.float16,
    device_map="auto",
)
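
# Optional: on memory-constrained hardware, the model could be loaded in 4-bit
# instead of float16. This is a sketch, not part of the original app; it assumes
# bitsandbytes is installed and a CUDA GPU is available.
# from transformers import BitsAndBytesConfig
# model = AutoModelForCausalLM.from_pretrained(
#     model_id,
#     trust_remote_code=True,
#     quantization_config=BitsAndBytesConfig(load_in_4bit=True),
#     device_map="auto",
# )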
processor = AutoProcessor.from_pretrained(model_id, trust_remote_code=True)

def generate_answer(image, question):
    if not question or question.strip() == "":
        return "❌ Please enter a medical question."

    prompt = f"### User: {question}\n### Assistant:"
    try:
        if image is None:
            inputs = processor(prompt, return_tensors="pt").to(model.device)
        else:
            inputs = processor(prompt, images=image, return_tensors="pt").to(model.device)
        outputs = model.generate(**inputs, max_new_tokens=256)
        # Decode only the newly generated tokens so the prompt is not echoed back
        # (slicing the decoded string by len(prompt) breaks when special tokens are skipped)
        new_tokens = outputs[0][inputs["input_ids"].shape[-1]:]
        return processor.tokenizer.decode(new_tokens, skip_special_tokens=True).strip()
    except Exception as e:
        return f"⚠️ Internal Error: {e}"

demo = gr.Interface(
    fn=generate_answer,
    inputs=[
        gr.Image(type="pil", label="Upload a medical image (optional)"),
        gr.Textbox(label="Medical Question"),
    ],
    outputs="text",
    title="🧠 ContactDoctor - Biomedical LLM",
    description="Multimodal medical assistant: upload an image and ask a medical question.",
)
demo.launch()