From 312bae9c2e7990274893d42c93036e0bbac09237 Mon Sep 17 00:00:00 2001
From: antoinepela
Date: Wed, 4 Dec 2024 23:03:10 +0400
Subject: [PATCH] Update Kube api & streamlit

---
 Makefile                                      |  18 ++-
 docker/fastapi/Dockerfile                     |  13 +-
 docker/fastapi/auth.py                        |  75 +++++++--
 docker/fastapi/database.py                    |  45 ------
 docker/fastapi/predict.py                     | 142 ++++++++++--------
 docker/fastapi/requirements.txt               |  26 ++--
 .../Dockerfile                                |   0
 .../requirements.txt                          |   0
 .../train_models.py                           |   0
 .../streamlit/app/pages/4_Authentification.py |   4 +-
 docker/streamlit/app/pages/5_Application.py   |   8 +-
 kubernetes/deployments/fastapi-deployment.yml |  50 +++---
 .../fastapi-persistent-volume.yml             |  58 -------
 .../persistent-volumes/mlfow-storage-pvc.yml  |   2 +-
 kubernetes/secrets/api-secrets.yaml           |  12 ++
 kubernetes/services/api-service.yml           |  23 +++
 16 files changed, 246 insertions(+), 230 deletions(-)
 delete mode 100755 docker/fastapi/database.py
 rename docker/{python_train_svd_model.py => python_train_models.py}/Dockerfile (100%)
 rename docker/{python_train_svd_model.py => python_train_models.py}/requirements.txt (100%)
 rename docker/{python_train_svd_model.py => python_train_models.py}/train_models.py (100%)
 delete mode 100755 kubernetes/persistent-volumes/fastapi-persistent-volume.yml
 create mode 100644 kubernetes/secrets/api-secrets.yaml
 create mode 100644 kubernetes/services/api-service.yml

diff --git a/Makefile b/Makefile
index 43f46a8..f66cf23 100755
--- a/Makefile
+++ b/Makefile
@@ -1,4 +1,4 @@
-NAMESPACE1 = reco-movies
+NAMESPACE1 = api
 NAMESPACE2 = airflow
 NAMESPACE3 = mlflow
 
@@ -145,6 +145,12 @@ start-mlflow:
 	helm install mlf-ts bitnami/mlflow --namespace mlflow --create-namespace
 	kubectl apply -f kubernetes/services/mlflow-service.yml
 
+start-api:
+	kubectl create namespace api
+	kubectl apply -f kubernetes/deployments/fastapi-deployment.yml
+	kubectl apply -f kubernetes/deployments/streamlit-deployment.yml
+	kubectl apply -f kubernetes/secrets/api-secrets.yaml
+	kubectl apply -f kubernetes/services/api-service.yml
 
 delete-pv-airflow:
 	kubectl delete pv airflow-local-dags-folder || true
@@ -161,6 +167,15 @@ reco: namespace pv secrets configmaps deployments services ingress
 namespace: check-kube
 	kubectl apply -f kubernetes/namespace/namespace.yml --validate=false
 
+change-namespace-api:
+	kubectl config set-context --current --namespace=$(NAMESPACE1)
+
+change-namespace-airflow:
+	kubectl config set-context --current --namespace=$(NAMESPACE2)
+
+change-namespace-mlflow:
+	kubectl config set-context --current --namespace=$(NAMESPACE3)
+
 pv: check-kube
 	kubectl apply -f kubernetes/persistent-volumes/fastapi-persistent-volume.yml --validate=false
 	kubectl apply -f kubernetes/persistent-volumes/grafana-persistent-volume.yml --validate=false
@@ -200,6 +215,5 @@ clean-kube-reco: check-kube
 
 clean-kube-airflow: check-kube
 	kubectl delete namespace $(NAMESPACE2)
-
 clean-kube-mlflow: check-kube
 	kubectl delete namespace $(NAMESPACE3)
\ No newline at end of file
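Note that start-api creates the deployments before api-secrets, so the fastapi pods can sit in CreateContainerConfigError for a moment until the Secret lands. A typical bring-up from a fresh cluster (assuming kubectl already points at it) looks like:

    make start-api
    make change-namespace-api
    kubectl get pods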
diff --git a/docker/fastapi/Dockerfile b/docker/fastapi/Dockerfile
index 50812c3..04fccda 100755
--- a/docker/fastapi/Dockerfile
+++ b/docker/fastapi/Dockerfile
@@ -1,11 +1,22 @@
+# Use a Python base image
 FROM python:3.12
 
+# Set the working directory
 WORKDIR /app
 
+# Install the required system dependencies
+RUN apt-get update && \
+    apt-get install -y python3-distutils gcc g++ && \
+    apt-get clean
+
+# Copy the requirements.txt file
 COPY ./requirements.txt /app/requirements.txt
 
+# Install the Python dependencies
 RUN pip install --no-cache-dir --upgrade -r /app/requirements.txt
 
+# Copy the rest of the application
 COPY . .
 
-CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8000", "--reload"]
\ No newline at end of file
+# Default command to run the application (adjust as needed)
+CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8000"]
\ No newline at end of file
diff --git a/docker/fastapi/auth.py b/docker/fastapi/auth.py
index 64fff82..9fa0d24 100755
--- a/docker/fastapi/auth.py
+++ b/docker/fastapi/auth.py
@@ -3,7 +3,6 @@
 from fastapi import APIRouter, Depends, HTTPException
 from pydantic import BaseModel
 from starlette import status
-from database import get_db_connection
 from passlib.context import CryptContext
 from fastapi.security import OAuth2PasswordRequestForm, OAuth2PasswordBearer
 from jose import jwt, JWTError
@@ -14,11 +13,15 @@
 import re
 import psycopg2
 import logging
+from kubernetes import client, config
 
 # Configure the logger
 logging.basicConfig(level=logging.INFO)
 logger = logging.getLogger(__name__)
 
+# Load the Kubernetes configuration
+config.load_incluster_config()
+
 # Create a router to handle the authentication routes
 router = APIRouter(
     prefix='/auth',  # Prefix for every route in this router
@@ -108,6 +111,26 @@ class Token(BaseModel):
     registry=collector
 )
 
+def load_config():
+    """Load the database configuration from environment variables."""
+    return {
+        'host': os.getenv('AIRFLOW_POSTGRESQL_SERVICE_HOST'),
+        'database': os.getenv('DATABASE'),
+        'user': os.getenv('USER'),
+        'password': os.getenv('PASSWORD')
+    }
+
+def connect(config):
+    """Connect to the PostgreSQL server and return the connection."""
+    try:
+        conn = psycopg2.connect(**config)
+        print('Connected to the PostgreSQL server.')
+        return conn
+    except (psycopg2.DatabaseError, Exception) as error:
+        print(f"Connection error: {error}")
+        return None
+
+
 # Function to validate the username
 def validate_username(username):
     if re.match("^[A-Za-z0-9_]+$", username) is None:
@@ -162,8 +185,11 @@ async def create_user(create_user_request: CreateUserRequest):
         logger.error(f"Erreur validation mot de passe: {password_error}")
         raise HTTPException(status_code=400, detail=password_error)
 
-    try:
-        with get_db_connection() as conn:
+    config = load_config()
+    conn = connect(config)
+
+    if conn is not None:
+        try:
             with conn.cursor() as cur:
                 cur.execute("SELECT email FROM users WHERE email = %s", (create_user_request.email,))
                 if cur.fetchone() is not None:
@@ -174,14 +200,22 @@ async def create_user(create_user_request: CreateUserRequest):
 
                 # Create the new user
                 hached_password = bcrypt_context.hash(create_user_request.password)
-                cur.execute("INSERT INTO users (username, email, hached_password) VALUES (%s, %s, %s)", (create_user_request.username, create_user_request.email, hached_password,))
+                cur.execute("INSERT INTO users (username, email, hached_password) VALUES (%s, %s, %s)",
+                            (create_user_request.username, create_user_request.email, hached_password,))
                 conn.commit()
                 logger.info("Utilisateur créé avec succès")
 
-    except psycopg2.Error as e:
-        error_counter.labels(error_type='database_error').inc()
-        logger.error(f"Erreur base de données: {str(e)}")
-        raise HTTPException(status_code=500, detail=f"Database error: {str(e)}")
+        except psycopg2.Error as e:
+            error_counter.labels(error_type='database_error').inc()
+            logger.error(f"Erreur base de données: {str(e)}")
+            raise HTTPException(status_code=500, detail=f"Database error: {str(e)}")
+
+        finally:
+            conn.close()  # Make sure the connection is closed after use
+
+    else:
+        logger.error("Échec de la connexion à la base de données.")
+        raise HTTPException(status_code=500, detail="Database connection failed.")
 
     duration = time.time() - start_time
     user_creation_duration_histogram.labels(status_code='201').observe(duration)
@@ -218,8 +252,11 @@ async def login_for_access_token(form_data: Annotated[OAuth2PasswordRequestForm,
     return response
 
 async def authenticate_user(email: str, password: str):
-    try:
-        with get_db_connection() as conn:
+    config = load_config()
+    conn = connect(config)
+
+    if conn is not None:
+        try:
             with conn.cursor() as cur:
                 cur.execute(
                     "SELECT userid, username, email, hached_password FROM users WHERE email = %s",
@@ -247,11 +284,16 @@ async def authenticate_user(email: str, password: str):
                 logger.info(f"Valeur de userId dans authenticate_user: {user_data['userId']}")
                 return user_data
 
-    except psycopg2.Error as e:
-        error_counter.labels(error_type='database_error').inc()
-        logger.error(f"Erreur base de données: {str(e)}")
-        raise HTTPException(status_code=500, detail=f"Database error: {str(e)}")
+        except psycopg2.Error as e:
+            error_counter.labels(error_type='database_error').inc()
+            logger.error(f"Erreur base de données: {str(e)}")
+            raise HTTPException(status_code=500, detail=f"Database error: {str(e)}")
 
+        finally:
+            conn.close()  # Make sure the connection is closed after use
+    else:
+        logger.error("Échec de la connexion à la base de données.")
+        raise HTTPException(status_code=500, detail="Database connection failed.")
 
 # Function to create an access token
 def create_access_token(email: str, user_id: int, expires_delta: timedelta):
@@ -266,7 +308,10 @@ async def get_current_user(token: Annotated[str, Depends(oauth2_bearer)]):
         payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM])  # Decode the token
         email: str = payload.get('sub')  # Get the username
         user_id: int = payload.get('id')  # Get the user's ID
-        with get_db_connection() as conn:
+        config = load_config()
+        conn = connect(config)
+
+        if conn is not None:
             with conn.cursor() as cur:
                 cur.execute(
                     "SELECT userid, username FROM users WHERE userid = %s",
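Because auth.py (above) and predict.py (below) call config.load_incluster_config() at import time, the container only starts correctly inside a cluster; outside of one, about all you can do is build the image. The tag below matches the one referenced by fastapi-deployment.yml, and the build context is assumed to be docker/fastapi:

    docker build -t antoinepela/projet_reco_movies:fastapi-latest docker/fastapi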
diff --git a/docker/fastapi/database.py b/docker/fastapi/database.py
deleted file mode 100755
index ef0075a..0000000
--- a/docker/fastapi/database.py
+++ /dev/null
@@ -1,45 +0,0 @@
-import psycopg2
-from dotenv import load_dotenv
-from contextlib import contextmanager
-import os
-
-# Load environment variables from the .env file
-load_dotenv()
-
-POSTGRES_USER= os.getenv('POSTGRES_USER')
-POSTGRES_PASSWORD= os.getenv('POSTGRES_PASSWORD')
-POSTGRES_DB= os.getenv('POSTGRES_DB')
-POSTGRES_HOST= os.getenv('POSTGRES_HOST')
-POSTGRES_PORT= os.getenv('POSTGRES_PORT')
-
-@contextmanager
-def get_db_connection():
-    """
-    Context manager for the database connection.
-    Opens a connection and closes it automatically after use.
-
-    Usage:
-        with get_db_connection() as conn:
-            with conn.cursor() as cur:
-                cur.execute("SELECT * FROM table")
-    """
-    conn = None
-    try:
-        conn = psycopg2.connect(
-            database=POSTGRES_DB,
-            host=POSTGRES_HOST,
-            user=POSTGRES_USER,
-            password=POSTGRES_PASSWORD,
-            port=POSTGRES_PORT
-        )
-        print("Connection à la base de données OK")
-        yield conn
-    except psycopg2.Error as e:
-        print(f"Erreur lors de la connexion à la base de données: {e}")
-        raise
-    finally:
-        if conn is not None:
-            conn.close()
-            print("Connexion à la base de données fermée")
-
-
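With database.py gone, load_config() reads the connection settings from the environment. Kubernetes injects *_SERVICE_HOST variables only for services in the pod's own namespace, and the API now runs in api while PostgreSQL lives in airflow, so AIRFLOW_POSTGRESQL_SERVICE_HOST has to be set explicitly; note that fastapi-deployment.yml below defines DATABASE, USER and PASSWORD but not the host variable. A sketch of what the pod needs (values are placeholders):

    AIRFLOW_POSTGRESQL_SERVICE_HOST=airflow-postgresql.airflow.svc.cluster.local
    DATABASE=postgres
    USER=postgres
    PASSWORD=postgres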
diff --git a/docker/fastapi/predict.py b/docker/fastapi/predict.py
index b25c661..d5ac4f6 100755
--- a/docker/fastapi/predict.py
+++ b/docker/fastapi/predict.py
@@ -7,7 +7,6 @@
 import numpy as np
 from rapidfuzz import process
 from fastapi import APIRouter, HTTPException
-from database import get_db_connection
 from typing import Dict, Any, Optional
 from prometheus_client import Counter, Histogram, CollectorRegistry
 import time
@@ -17,6 +16,7 @@
 from dotenv import load_dotenv
 import requests
 import logging
+from kubernetes import client, config
 
 # Load environment variables from the .env file
 load_dotenv()
@@ -27,6 +27,20 @@
 logging.basicConfig(level=logging.INFO)
 logger = logging.getLogger(__name__)
 
+# Load the Kubernetes configuration
+config.load_incluster_config()
+
+# Define the volume and the volume mount
+volume = client.V1Volume(
+    name="model-storage",
+    persistent_volume_claim=client.V1PersistentVolumeClaimVolumeSource(claim_name="model-storage-pvc")
+)
+
+volume_mount = client.V1VolumeMount(
+    name="model-storage",
+    mount_path="/models"
+)
+
 # ROUTER FOR THE PREDICT ROUTES
 router = APIRouter(
@@ -34,9 +48,26 @@
     tags=['predict']  # Tag for the documentation
 )
 
-
 # ALL HELPER FUNCTIONS
 
+def load_config():
+    """Load the database configuration from environment variables."""
+    return {
+        'host': os.getenv('AIRFLOW_POSTGRESQL_SERVICE_HOST'),
+        'database': os.getenv('DATABASE'),
+        'user': os.getenv('USER'),
+        'password': os.getenv('PASSWORD')
+    }
+
+def connect(config):
+    """Connect to the PostgreSQL server and return the connection."""
+    try:
+        conn = psycopg2.connect(**config)
+        print('Connected to the PostgreSQL server.')
+        return conn
+    except (psycopg2.DatabaseError, Exception) as error:
+        print(f"Connection error: {error}")
+        return None
 
 # Load the datasets from the database
 def fetch_ratings() -> pd.DataFrame:
@@ -45,14 +76,20 @@
     SELECT userId, movieId, rating
     FROM ratings
     """
-    try:
-        with get_db_connection() as conn:
-            df = pd.read_sql_query(query, conn)
-            print("Enregistrements table ratings récupérés")
-            return df
-    except Exception as e:
-        print(f"Erreur lors de la récupération des enregistrements: {e}")
-        raise
+    config = load_config()
+    conn = connect(config)
+
+    if conn is not None:
+        try:
+            with conn.cursor() as cur:
+                cur.execute(query)
+                df = pd.DataFrame(cur.fetchall(), columns=['userId', 'movieId', 'rating'])
+                print("Enregistrements table ratings récupérés")
+                return df
+
+        except Exception as e:
+            print(f"Erreur lors de la récupération des enregistrements: {e}")
+            raise
 
 def fetch_movies() -> pd.DataFrame:
     """Fetch records from the movies table and turn them into a DataFrame."""
@@ -60,14 +97,19 @@
     SELECT movieId, title, genres
     FROM movies
     """
-    try:
-        with get_db_connection() as conn:
-            df = pd.read_sql_query(query, conn)
-            print("Enregistrements table movies récupérés")
-            return df
-    except Exception as e:
-        print(f"Erreur lors de la récupération des enregistrements: {e}")
-        raise
+    config = load_config()
+    conn = connect(config)
+
+    if conn is not None:
+        try:
+            with conn.cursor() as cur:
+                cur.execute(query)
+                df = pd.DataFrame(cur.fetchall(), columns=['movieId', 'title', 'genres'])
+                print("Enregistrements table movies récupérés")
+                return df
+        except Exception as e:
+            print(f"Erreur lors de la récupération des enregistrements: {e}")
+            raise
 
 def fetch_links() -> pd.DataFrame:
     """Fetch records from the links table and turn them into a DataFrame."""
@@ -75,46 +117,29 @@
     SELECT id, movieId, imdbId, tmdbId
     FROM links
     """
-    try:
-        with get_db_connection() as conn:
-            df = pd.read_sql_query(query, conn)
-            print("Enregistrements table links récupérés")
-            return df
-    except Exception as e:
-        print(f"Erreur lors de la récupération des enregistrements: {e}")
-        raise
-
-# def read_ratings(ratings_csv: str, data_dir: str = "/app/raw") -> pd.DataFrame:
-#     """Reads the CSV file containing movie ratings."""
-#     data = pd.read_csv(os.path.join(data_dir, ratings_csv))
-#     print("Dataset ratings loaded")
-#     return data
-
-
-
-# def read_movies(movies_csv: str, data_dir: str = "/app/raw") -> pd.DataFrame:
-#     """Reads the CSV file containing movie information."""
-#     df = pd.read_csv(os.path.join(data_dir, movies_csv))
-#     print("Dataset movies loaded")
-#     return df
-
-# def read_links(links_csv: str, data_dir: str = "/app/raw") -> pd.DataFrame:
-#     """Reads the CSV file containing movie information."""
-#     df = pd.read_csv(os.path.join(data_dir, links_csv))
-#     print("Dataset links loaded")
-#     return df
+    config = load_config()
+    conn = connect(config)
 
+    if conn is not None:
+        try:
+            with conn.cursor() as cur:
+                cur.execute(query)
+                df = pd.DataFrame(cur.fetchall(), columns=['id', 'movieId', 'imdbId', 'tmdbId'])
+                print("Enregistrements table links récupérés")
+                return df
+        except Exception as e:
+            print(f"Erreur lors de la récupération des enregistrements: {e}")
+            raise
 
 # Load the latest model
-def load_model(pkl_files, directory = "/app/model") :
-    """Load the model from a directory."""
-    # Check that the directory exists
-    if not os.path.exists(directory):
-        raise FileNotFoundError(f"Le répertoire {directory} n'existe pas.")
-    # Load the model
-    filepath = os.path.join(directory, pkl_files)
-    with open(filepath, 'rb') as file:
+def load_model(model_name):
+    """Load the model from the mounted directory."""
+    model_path = f"/models/{model_name}"
+    if not os.path.exists(model_path):
+        raise FileNotFoundError(f"Le modèle {model_name} n'existe pas dans {model_path}.")
+    with open(model_path, 'rb') as file:
         model = pickle.load(file)
-    print(f'Modèle chargé depuis {filepath}')
+    print(f'Modèle chargé depuis {model_path}')
     return model
 
 def create_X(df):
@@ -306,15 +331,6 @@ def movie_finder(title):
 # LOAD DATA AT API STARTUP
 print("DEBUT DES CHARGEMENTS")
 
-# Test the database connection
-with get_db_connection() as conn:
-    with conn.cursor() as cur:
-        cur.execute("SELECT table_name FROM information_schema.tables WHERE table_schema = 'public'")
-        tables = cur.fetchall()
-        # Print the table names
-        print("Tables présentes dans la base de données :")
-        for table in tables:
-            print(table[0])
 # Load our dataframes
 ratings = fetch_ratings()
 movies = fetch_movies()
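load_model() now resolves a file name against the shared /models mount instead of a per-file hostPath. The V1Volume and V1VolumeMount objects built at import time describe that mount but are not attached to anything here; the actual mount comes from fastapi-deployment.yml below. A minimal usage sketch (the .pkl names are assumptions carried over from the old persistent-volume paths):

    # assuming the PVC is mounted at /models and the trainer wrote these files
    model_svd = load_model("model_SVD.pkl")
    model_knn = load_model("model_KNN.pkl")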
diff --git a/docker/fastapi/requirements.txt b/docker/fastapi/requirements.txt
index 5e6e670..2afda8f 100755
--- a/docker/fastapi/requirements.txt
+++ b/docker/fastapi/requirements.txt
@@ -1,17 +1,15 @@
-fastapi[standard]>=0.113.0,<0.114.0
-pydantic>=2.7.0,<3.0.0
-python-jose[cryptography]
-python-multipart
-numpy<2.0
-scikit-learn==1.5.1
-pandas
+fastapi==0.95.2  # or the latest stable version
+uvicorn==0.22.0
+pydantic==1.10.4
+passlib==1.7.4
+python-jose==3.3.0
+psycopg2-binary==2.9.6
+python-dotenv==1.0.0
 prometheus-client
 prometheus-fastapi-instrumentator
-requests
-Pillow
-matplotlib
-rapidfuzz
+kubernetes
+numpy
 scipy
-scikit-surprise
-psycopg2-binary>=2.9.9
-passlib
+pandas
+rapidfuzz
+scikit-surprise
\ No newline at end of file
diff --git a/docker/python_train_svd_model.py/Dockerfile b/docker/python_train_models.py/Dockerfile
similarity index 100%
rename from docker/python_train_svd_model.py/Dockerfile
rename to docker/python_train_models.py/Dockerfile
diff --git a/docker/python_train_svd_model.py/requirements.txt b/docker/python_train_models.py/requirements.txt
similarity index 100%
rename from docker/python_train_svd_model.py/requirements.txt
rename to docker/python_train_models.py/requirements.txt
diff --git a/docker/python_train_svd_model.py/train_models.py b/docker/python_train_models.py/train_models.py
similarity index 100%
rename from docker/python_train_svd_model.py/train_models.py
rename to docker/python_train_models.py/train_models.py
diff --git a/docker/streamlit/app/pages/4_Authentification.py b/docker/streamlit/app/pages/4_Authentification.py
index 8b9e100..0444b82 100755
--- a/docker/streamlit/app/pages/4_Authentification.py
+++ b/docker/streamlit/app/pages/4_Authentification.py
@@ -39,7 +39,7 @@
             # Convert the username to lowercase before sending
             normalized_username = username.lower()
 
-            response = requests.post("http://fastapi:8000/auth/", json={"username": normalized_username, "email": email, "password": password})
+            response = requests.post("http://fastapi/auth/", json={"username": normalized_username, "email": email, "password": password})
             if response.status_code == 201:  # User created successfully
                 st.success(f"Inscription réussie !")
                 st.balloons()
@@ -61,7 +61,7 @@
     if submitted:
         try:
             response = requests.post(
-                "http://fastapi:8000/auth/token",
+                "http://fastapi/auth/token",
                 data={"username": email, "password": password})
 
             if response.status_code == 200:
diff --git a/docker/streamlit/app/pages/5_Application.py b/docker/streamlit/app/pages/5_Application.py
index ddfaaf6..4b06c97 100755
--- a/docker/streamlit/app/pages/5_Application.py
+++ b/docker/streamlit/app/pages/5_Application.py
@@ -23,7 +23,7 @@
     token = st.session_state.get('token')
 
     response = requests.get(
-        "http://fastapi:8000/",
+        "http://fastapi/",
         json={"token": token},
         headers=headers
     )
@@ -40,7 +40,7 @@
     try:
         payload = {"userId": user_id}
         response = requests.post(
-            "http://fastapi:8000/predict/best_user_movies",
+            "http://fastapi/predict/best_user_movies",
             json=payload,
             headers=headers
         )
@@ -62,7 +62,7 @@
     try:
         payload = {"userId": user_id}
         response = requests.post(
-            "http://fastapi:8000/predict/identified_user",
+            "http://fastapi/predict/identified_user",
             json=payload,
             headers=headers
         )
@@ -91,7 +91,7 @@
     if st.button("Rechercher"):
         payload = {"userId": user_id, "movie_title": movie_name}
         response = requests.post(
-            "http://fastapi:8000/predict/similar_movies",
+            "http://fastapi/predict/similar_movies",
             json=payload,
             headers=headers
         )
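The Streamlit pages now call http://fastapi/... without :8000, so a Service has to answer on port 80 in front of the pods. The new kubernetes/services/api-service.yml is listed in the diffstat but its content is cut off below; a sketch of a Service that would make these URLs resolve (the selector and port mapping are assumptions, not the actual file):

    apiVersion: v1
    kind: Service
    metadata:
      name: fastapi
      namespace: api
    spec:
      selector:
        app: fastapi
      ports:
        - port: 80
          targetPort: 8000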
diff --git a/kubernetes/deployments/fastapi-deployment.yml b/kubernetes/deployments/fastapi-deployment.yml
index 0b441a2..5ff3902 100755
--- a/kubernetes/deployments/fastapi-deployment.yml
+++ b/kubernetes/deployments/fastapi-deployment.yml
@@ -2,7 +2,7 @@ apiVersion: apps/v1
 kind: Deployment
 metadata:
   name: fastapi
-  namespace: reco-movies
+  namespace: api
 spec:
   replicas: 2
   selector:
@@ -18,38 +18,38 @@ spec:
         image: antoinepela/projet_reco_movies:fastapi-latest
         imagePullPolicy: Always  # Force Kubernetes to always pull the image
         env:
-          - name: POSTGRES_PORT
-            value: "5432"
-          - name: POSTGRES_DB
+          - name: DATABASE
+            value: "postgres"
+          - name: USER
+            value: "postgres"
+          - name: PASSWORD
             valueFrom:
-              configMapKeyRef:
-                name: postgres-api-config
-                key: POSTGRES_DB
-          - name: POSTGRES_USER
+              secretKeyRef:
+                name: api-secrets
+                key: PASSWORD
+          - name: SECRET_KEY
             valueFrom:
-              configMapKeyRef:
-                name: postgres-api-config
-                key: POSTGRES_USER
-          - name: POSTGRES_PASSWORD
+              secretKeyRef:
+                name: api-secrets
+                key: SECRET_KEY
+          - name: ALGORITHM
             valueFrom:
               secretKeyRef:
-                name: postgres-api-secrets
-                key: POSTGRES_PASSWORD
-          - name: POSTGRES_HOST
-            value: "postgres-api-service"
+                name: api-secrets
+                key: ALGORITHM
+          - name: TMDB_API_TOKEN
+            valueFrom:
+              secretKeyRef:
+                name: api-secrets
+                key: TMDB_API_TOKEN
           - name: UVICORN_RELOAD
             value: "false"
         ports:
           - containerPort: 8000
         volumeMounts:
-          - name: model-knn-volume
-            mountPath: /app/models/model_KNN.pkl
-          - name: model-svd-volume
-            mountPath: /app/models/model_SVD.pkl
+          - name: model-storage
+            mountPath: /models
       volumes:
-        - name: model-knn-volume
-          persistentVolumeClaim:
-            claimName: pvc-model-knn
-        - name: model-svd-volume
+        - name: model-storage
           persistentVolumeClaim:
-            claimName: pvc-model-svd
+            claimName: model-storage-pvc
diff --git a/kubernetes/persistent-volumes/fastapi-persistent-volume.yml b/kubernetes/persistent-volumes/fastapi-persistent-volume.yml
deleted file mode 100755
index 8632438..0000000
--- a/kubernetes/persistent-volumes/fastapi-persistent-volume.yml
+++ /dev/null
@@ -1,58 +0,0 @@
-# pv-model-knn
-apiVersion: v1
-kind: PersistentVolume
-metadata:
-  name: pv-model-knn
-spec:
-  capacity:
-    storage: 1Gi
-  accessModes:
-    - ReadWriteOnce
-  hostPath:
-    path: "/home/antoine/jul24_cmlops_reco_film/ml/models/model_KNN.pkl"
-  volumeMode: Filesystem
-
----
-# pv-model-svd
-apiVersion: v1
-kind: PersistentVolume
-metadata:
-  name: pv-model-svd
-spec:
-  capacity:
-    storage: 1Gi
-  accessModes:
-    - ReadWriteOnce
-  hostPath:
-    path: "/home/antoine/jul24_cmlops_reco_film/ml/models/model_SVD.pkl"
-  volumeMode: Filesystem
-
----
-# pvc-model-knn
-apiVersion: v1
-kind: PersistentVolumeClaim
-metadata:
-  name: pvc-model-knn
-  namespace: reco-movies
-spec:
-  accessModes:
-    - ReadWriteOnce
-  resources:
-    requests:
-      storage: 1Gi
-
----
-# pvc-model-svd
-apiVersion: v1
-kind: PersistentVolumeClaim
-metadata:
-  name: pvc-model-svd
-  namespace: reco-movies
-spec:
-  accessModes:
-    - ReadWriteOnce
-  resources:
-    requests:
-      storage: 1Gi
----
-
diff --git a/kubernetes/persistent-volumes/mlfow-storage-pvc.yml b/kubernetes/persistent-volumes/mlfow-storage-pvc.yml
index c0f504e..6dda72d 100644
--- a/kubernetes/persistent-volumes/mlfow-storage-pvc.yml
+++ b/kubernetes/persistent-volumes/mlfow-storage-pvc.yml
@@ -1,7 +1,7 @@
 apiVersion: v1
 kind: PersistentVolumeClaim
 metadata:
-  name: models-pvc
+  name: model-storage-pvc
   namespace: airflow
 spec:
   accessModes:
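One thing to watch: PersistentVolumeClaims are namespace-scoped, so the deployment in api can only bind a claim named model-storage-pvc that exists in api, while the PVC renamed above sits in airflow; a matching claim in api may still be needed. To check the rollout and the mount (the app=fastapi label is an assumption):

    kubectl -n api get deploy,pods
    kubectl -n api describe pod -l app=fastapi | grep -A2 model-storage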
diff --git a/kubernetes/secrets/api-secrets.yaml b/kubernetes/secrets/api-secrets.yaml
new file mode 100644
index 0000000..c78d1df
--- /dev/null
+++ b/kubernetes/secrets/api-secrets.yaml
@@ -0,0 +1,12 @@
+apiVersion: v1
+kind: Secret
+metadata:
+  name: api-secrets
+  namespace: api
+type: Opaque
+data:
+  PASSWORD: cG9zdGdyZXM=  # base64 encoded 'postgres'
+stringData:
+  SECRET_KEY: "azertyuiop^$*qsdfghjklmù
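Entries under data: must be base64-encoded, while stringData: takes plain text that the API server encodes on write. The PASSWORD value can be reproduced and checked with:

    echo -n 'postgres' | base64    # cG9zdGdyZXM=
    echo 'cG9zdGdyZXM=' | base64 -d    # postgres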