Skip to content

Commit

Permalink
Merge pull request #4 from DataScientest-Studio/refactor/load-recomme…
Browse files Browse the repository at this point in the history
…nder-model-def-in-api-predict

Refactor and Docs
  • Loading branch information
mikhaelbenilouz authored Dec 2, 2024
2 parents c09457c + 4e43791 commit 99da34f
Show file tree
Hide file tree
Showing 2 changed files with 40 additions and 45 deletions.
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ System of recommendation of movies based on the user's profile and preferences.

## ℹ️ About the Project

This project is a starting pack for MLOps projects focused on the subject of "movie recommendation". It provides a structured framework to develop, train, and deploy machine learning models for recommending movies to users. It uses Supabase for the backend, Airflow for the orchestration, MLflow for the tracking, Minio for the storage of the models, Prometheus and Grafana for the monitoring.
This project is a starting pack for MLOps projects focused on the subject of "movie recommendation". It provides a fully integrated local development environment where all tools and applications are containerized and managed through Docker Compose. The stack includes Supabase for the backend, Airflow for workflow orchestration, MLflow for experiment tracking, Minio for model storage, and Prometheus/Grafana for monitoring — all configured to work together seamlessly in your local environment. This allows you to develop, train, and deploy machine learning models locally for recommending movies, with a complete MLOps toolchain running entirely on your machine.

## 🏞 Showcase

Expand Down
83 changes: 39 additions & 44 deletions app/api/predict/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,29 +30,20 @@
# Module-level cache for the loaded recommender model and its metadata;
# populated lazily by load_recommender_model().
model = None
model_infos = None

# MLflow tracking server URL; overridable via the MLFLOW_TRACKING_URI env var.
# Default points at the docker-compose service name — TODO confirm against compose file.
MLFLOW_URI = os.getenv("MLFLOW_TRACKING_URI", "http://tracking_server:5000")

def load_model_from_mlflow():
    """Load the "champion" movie_recommender model from the MLflow registry.

    Pings the tracking server first so a dead server fails fast (5 s timeout)
    instead of hanging inside the MLflow client calls.

    Returns:
        tuple: (model, model_version) — the scikit-learn model object and the
        registry version string of the model aliased "champion".

    Raises:
        requests.exceptions.RequestException: if the MLflow server is unreachable.
        Exception: any MLflow error while resolving or loading the model
        (propagated to the caller, which falls back to the local model).
    """
    print(f"Tentative de connexion à MLflow sur : {MLFLOW_URI}")

    mlflow.set_tracking_uri(MLFLOW_URI)

    try:
        # Connectivity check: fail fast if the tracking server is down
        # before attempting any registry operation.
        response = requests.get(MLFLOW_URI, timeout=5)
        response.raise_for_status()
        mlflow.search_runs()
        print("Connexion à MLflow réussie")

        # Resolve whichever registered version currently carries the
        # "champion" alias, then load that exact version.
        client = mlflow.tracking.MlflowClient()
        model_champion = client.get_model_version_by_alias(
            name="movie_recommender", alias="champion"
        )
        model_version = model_champion.version
        model = mlflow.sklearn.load_model(f"models:/movie_recommender/{model_version}")
        print("Modèle chargé avec succès depuis MLflow")

        return model, model_version
    except requests.exceptions.RequestException as e:
        print(f"Erreur de connexion à MLflow: {str(e)}")
        raise
def load_model_locally():
    """Load the fallback recommender model from the bundled model.pkl file.

    Used when MLflow is unreachable; reads "model.pkl" relative to the
    process working directory — presumably baked into the API image
    (TODO confirm against the Dockerfile).

    Returns:
        tuple: (model, "local") — the unpickled model and the literal
        version marker "local".

    Raises:
        Exception: any failure opening or unpickling the file (re-raised
        after logging so the caller can decide how to proceed).
    """
    print("Tentative de chargement du modèle local")

    try:
        # NOTE(review): pickle.load on a file shipped with the image is
        # acceptable; never point this at untrusted input.
        with open("model.pkl", "rb") as f:
            model = pickle.load(f)
        print("Modèle local chargé avec succès")
        return model, "local"
    except Exception as e:
        print(f"Erreur lors du chargement du modèle local: {str(e)}")
        raise
def load_recommender_model():
    """Load the recommender model, preferring MLflow with a local fallback.

    Tries the MLflow registry first; on ANY failure falls back to the
    bundled model.pkl. Records the outcome in the MODEL_INFO Prometheus
    metric so monitoring shows which source is live.

    Returns:
        tuple: (model, model_infos) — the model object and a dict with
        keys "model_name", "model_version" and "source" ("mlflow" or "local").
    """
    try:
        model, model_version = load_model_from_mlflow()
        source = "mlflow"
    except Exception:
        # MLflow unavailable or load failed — degrade to the local copy.
        model, model_version = load_model_locally()
        source = "local"

    model_infos = {
        "model_name": "movie_recommender",
        "model_version": model_version,
        "source": source,
    }

    # Expose the active model's provenance to Prometheus.
    MODEL_INFO.info(model_infos)

    return model, model_infos


def make_predictions(genres, model):
Expand Down

0 comments on commit 99da34f

Please sign in to comment.