Skip to content

Commit 154aa50

Browse files
committed
Add application file
1 parent 3920816 commit 154aa50

3 files changed

Lines changed: 49 additions & 6 deletions

File tree

Dockerfile

Lines changed: 21 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1,16 +1,31 @@
1-
# syntax=docker/dockerfile:1
FROM python:3.10-slim

# System packages needed to build/run torch and transformers.
# --no-install-recommends keeps the layer small; the apt list cache is
# removed in the same RUN so it never ships in a layer.
RUN apt-get update && apt-get install -y --no-install-recommends \
        build-essential \
        curl \
        git \
    && rm -rf /var/lib/apt/lists/*

WORKDIR /app

# Copy only the dependency manifest first so the pip layers below stay
# cached while application source changes.
COPY requirements.txt .

# Upgrade pip; --no-cache-dir avoids baking the wheel cache into the layer.
RUN pip install --no-cache-dir --upgrade pip

# CPU-only torch from the official PyTorch CPU index (the modern replacement
# for the legacy torch_stable.html finder link). numpy is capped below 2
# because torch 2.2 wheels were built against the numpy 1.x ABI.
RUN pip install --no-cache-dir torch==2.2.0 --index-url https://download.pytorch.org/whl/cpu
RUN pip install --no-cache-dir "numpy<2"
RUN pip install --no-cache-dir -r requirements.txt

# Non-root runtime user; uid 1000 is the Hugging Face Spaces convention.
RUN useradd -m -u 1000 user

# Copy the rest of the project owned by that user. NOTE(review): the original
# also did `COPY static ./static`, which is redundant — `COPY . .` already
# includes static/ (assumes static/ is not excluded by .dockerignore — confirm).
COPY --chown=user:user . .

USER user

# Document the port HF Spaces expects (EXPOSE does not publish anything).
EXPOSE 7860

# Launch FastAPI via uvicorn. Exec (JSON) form keeps uvicorn as PID 1 so it
# receives SIGTERM directly on container stop. HF Spaces routes to 7860.
CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "7860"]

app.py

Lines changed: 28 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,28 @@
1+
import gradio as gr
from transformers import pipeline
import os

# Sentiment model to serve; overridable via the HF_MODEL env var so this demo
# stays in sync with the API that uses the same default checkpoint.
MODEL_NAME = os.getenv("HF_MODEL", "distilbert-base-uncased-finetuned-sst-2-english")
sentiment_pipe = pipeline("sentiment-analysis", model=MODEL_NAME)


def predict_sentiment(text):
    """Classify `text` and return a human-readable "Label: …, Confidence: …" string."""
    prediction = sentiment_pipe(text)[0]
    # Pipeline labels look like "POSITIVE"/"NEGATIVE"; normalize to lowercase words.
    is_positive = prediction["label"].upper().startswith("POS")
    label = "positive" if is_positive else "negative"
    confidence = float(prediction["score"])
    return f"Label: {label}, Confidence: {confidence:.4f}"


# Gradio UI wired to the prediction function above.
iface = gr.Interface(
    fn=predict_sentiment,
    inputs=gr.Textbox(lines=3, placeholder="Entrez votre texte ici..."),
    outputs="text",
    title="Sentiment Analysis Demo",
    description=f"Modèle: {MODEL_NAME}",
)

# Run the interface locally (share=True exposes a temporary public gradio link).
if __name__ == "__main__":
    iface.launch(share=True)
28+

requirements.txt

-8.13 KB
Binary file not shown.

0 commit comments

Comments
 (0)