From bddda0823892d825b46c0df769fd0e1988a9acbb Mon Sep 17 00:00:00 2001
From: sys_softwarerecipes
Date: Tue, 5 Mar 2024 07:21:35 +0530
Subject: [PATCH] adding chart evi-ai-inference-2.1.0

---
 charts/evi-ai-inference-2.1.0/Chart.yaml     |   6 +
 .../configs/AiInference.config               |  13 ++
 .../configs/media_storage_configmap.json     |  24 +++
 .../templates/configmap.yaml                 |  31 ++++
 .../templates/deployment.yaml                | 142 ++++++++++++++++++
 .../templates/secret.yaml                    |  24 +++
 .../templates/service.yaml                   |  34 +++++
 .../templates/serviceaccount.yaml            |  20 +++
 charts/evi-ai-inference-2.1.0/values.yaml    |  73 +++++++++
 9 files changed, 367 insertions(+)
 create mode 100644 charts/evi-ai-inference-2.1.0/Chart.yaml
 create mode 100644 charts/evi-ai-inference-2.1.0/configs/AiInference.config
 create mode 100644 charts/evi-ai-inference-2.1.0/configs/media_storage_configmap.json
 create mode 100644 charts/evi-ai-inference-2.1.0/templates/configmap.yaml
 create mode 100644 charts/evi-ai-inference-2.1.0/templates/deployment.yaml
 create mode 100644 charts/evi-ai-inference-2.1.0/templates/secret.yaml
 create mode 100644 charts/evi-ai-inference-2.1.0/templates/service.yaml
 create mode 100644 charts/evi-ai-inference-2.1.0/templates/serviceaccount.yaml
 create mode 100644 charts/evi-ai-inference-2.1.0/values.yaml

diff --git a/charts/evi-ai-inference-2.1.0/Chart.yaml b/charts/evi-ai-inference-2.1.0/Chart.yaml
new file mode 100644
index 0000000..5e98a84
--- /dev/null
+++ b/charts/evi-ai-inference-2.1.0/Chart.yaml
@@ -0,0 +1,6 @@
+apiVersion: v2
+appVersion: 1.16.0
+description: A Helm chart for Kubernetes
+name: evi-ai-inference
+type: application
+version: 2.1.0
diff --git a/charts/evi-ai-inference-2.1.0/configs/AiInference.config b/charts/evi-ai-inference-2.1.0/configs/AiInference.config
new file mode 100644
index 0000000..8d395de
--- /dev/null
+++ b/charts/evi-ai-inference-2.1.0/configs/AiInference.config
@@ -0,0 +1,13 @@
+[Service]
+logDir=.
+logMaxFileSize=16777216
+logMaxFileCount=8
+logSeverity=0
+[HTTP]
+address=0.0.0.0
+RESTfulPort=restServicePort
+gRPCPort=grpcServicePort
+[Pipeline]
+maxConcurrentWorkload=maxConcurrentWorkloadPerReplica
+pipelineManagerPoolSize=1
+maxPipelineLifetime=30
diff --git a/charts/evi-ai-inference-2.1.0/configs/media_storage_configmap.json b/charts/evi-ai-inference-2.1.0/configs/media_storage_configmap.json
new file mode 100644
index 0000000..afb2b2b
--- /dev/null
+++ b/charts/evi-ai-inference-2.1.0/configs/media_storage_configmap.json
@@ -0,0 +1,24 @@
+{
+    "version": 1,
+    "name": "row data source",
+    "video_image": {
+        "address": "minIOAddress_placeholder",
+        "port": "minIOPort_placeholder",
+        "rootUser": "rootUserPath_placeholder",
+        "rootPassword": "rootPasswordPath_placeholder"
+    },
+    "video_image_attributes": {
+        "flask_server_address": "storageRestAddress_placeholder",
+        "flask_server_port": "storageRestPort_placeholder",
+        "prefix": "v1",
+        "media": "/media"
+    },
+    "mediatype": [
+        "image",
+        "video"
+    ],
+    "datasource": [
+        "person",
+        "vehicle"
+    ]
+}
diff --git a/charts/evi-ai-inference-2.1.0/templates/configmap.yaml b/charts/evi-ai-inference-2.1.0/templates/configmap.yaml
new file mode 100644
index 0000000..4d06167
--- /dev/null
+++ b/charts/evi-ai-inference-2.1.0/templates/configmap.yaml
@@ -0,0 +1,31 @@
+# INTEL CONFIDENTIAL
+#
+# Copyright (C) 2022 Intel Corporation.
+#
+# This software and the related documents are Intel copyrighted materials, and your use of
+# them is governed by the express license under which they were provided to you (License).
+# Unless the License provides otherwise, you may not use, modify, copy, publish, distribute,
+# disclose or transmit this software or the related documents without Intel's prior written permission.
+#
+# This software and the related documents are provided as is, with no express or implied warranties,
+# other than those that are expressly stated in the License.
+
+---
+apiVersion: v1
+kind: ConfigMap
+metadata:
+  namespace: {{ .Values.global.namespace.value }}
+  name: {{ .Values.configMap.AIInference.name }}
+data:
+{{ $v := .Values.configMap.AIInference.value }}
+{{ (.Files.Glob $v).AsConfig | replace "restServicePort" (toString .Values.image.restfulPort) | replace "grpcServicePort" (toString .Values.image.grpcPort) | replace "maxConcurrentWorkloadPerReplica" (toString .Values.configMap.AIInference.maxConcurrentWorkloadPerReplica) | indent 2 }}

+---
+apiVersion: v1
+kind: ConfigMap
+metadata:
+  namespace: {{ .Values.global.namespace.value }}
+  name: {{ .Values.configMap.mediaStorage.name }}
+data:
+{{ $v := .Values.configMap.mediaStorage.value }}
+{{ (.Files.Glob $v).AsConfig | replace "minIOPort_placeholder" .Values.configMap.mediaStorage.minIOPort | replace "minIOAddress_placeholder" .Values.configMap.mediaStorage.minIOAddress | replace "storageRestPort_placeholder" .Values.configMap.hbaseStorage.storageRestPort | replace "storageRestAddress_placeholder" .Values.configMap.hbaseStorage.storageRestAddress | replace "rootUserPath_placeholder" .Values.configMap.mediaStorage.rootUserMountPath | replace "rootPasswordPath_placeholder" .Values.configMap.mediaStorage.rootPasswordMountPath | indent 2 }}
diff --git a/charts/evi-ai-inference-2.1.0/templates/deployment.yaml b/charts/evi-ai-inference-2.1.0/templates/deployment.yaml
new file mode 100644
index 0000000..244b3fe
--- /dev/null
+++ b/charts/evi-ai-inference-2.1.0/templates/deployment.yaml
@@ -0,0 +1,142 @@
+# INTEL CONFIDENTIAL
+#
+# Copyright (C) 2022-2023 Intel Corporation.
+#
+# This software and the related documents are Intel copyrighted materials, and your use of
+# them is governed by the express license under which they were provided to you (License).
+# Unless the License provides otherwise, you may not use, modify, copy, publish, distribute,
+# disclose or transmit this software or the related documents without Intel's prior written permission.
+#
+# This software and the related documents are provided as is, with no express or implied warranties,
+# other than those that are expressly stated in the License.
+
+---
+{{- if .Values.global.namespace.create -}}
+apiVersion: v1
+kind: Namespace
+metadata:
+  name: {{ .Values.global.namespace.value }}
+{{ if .Values.global.namespace.istioInjection.create }}
+  labels:
+    istio-injection: enabled
+{{ end }}
+{{- end -}}

+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+  name: {{ .Values.global.deployment.name }}
+  namespace: {{ .Values.global.namespace.value }}
+  annotations:
+    container.apparmor.security.beta.kubernetes.io/ai-inference: runtime/default
+spec:
+  selector:
+    matchLabels:
+      app: ai-inference
+      version: v1
+  replicas: {{ .Values.replicaCount }}
+  template:
+    metadata:
+      labels:
+        app: ai-inference
+        sidecar.istio.io/inject: "{{ .Values.istioInjection }}"
+        version: v1
+    spec:
+      nodeSelector:
+        feature.node.kubernetes.io/cpu-cpuid.AVX512VNNI: 'true'
+        feature.node.kubernetes.io/cpu-cpuid.AVX2: 'true'
+      securityContext:
+        seccompProfile:
+          type: RuntimeDefault
+      serviceAccountName: {{ .Values.global.service.name }}
+      # tolerations:
+      #   - key: "node"
+      #     operator: "Equal"
+      #     value: "hddl"
+      #     effect: "NoSchedule"
+      affinity:
+        podAntiAffinity:
+          preferredDuringSchedulingIgnoredDuringExecution:
+            - weight: 1
+              podAffinityTerm:
+                labelSelector:
+                  matchExpressions:
+                    - key: app
+                      operator: In
+                      values:
+                        - ai-inference
+                topologyKey: kubernetes.io/hostname
+      containers:
+        - name: {{ .Values.global.deployment.name }}
+          image: "{{ .Values.image.repository }}:{{ .Values.image.Version }}"
+          command: ["/opt/run_service.sh"]
+          imagePullPolicy: {{ .Values.image.pullPolicy }}
+          securityContext:
+            # readOnlyRootFilesystem: true
+            allowPrivilegeEscalation: false
+          resources:
+            limits:
+              cpu: {{ .Values.resources.limits.cpu }}
+              memory: {{ .Values.resources.limits.memory }}
+            requests:
+              cpu: {{ .Values.resources.requests.cpu }}
+              memory: {{ .Values.resources.requests.memory }}
+          env:
+            - name: FeatureStorage_HBaseVehicleFeatureServerAddress
+              value: "{{ .Values.configMap.hbaseStorage.hbaseAddr }}"
+            - name: FeatureStorage_HBaseVehicleFeatureServerPort
+              value: "{{ .Values.configMap.hbaseStorage.hbasePort }}"
+            - name: FeatureStorage_RestControllerBaseUrl
+              value: "{{ .Values.configMap.hbaseStorage.storageRestAddress }}:{{ .Values.configMap.hbaseStorage.storageRestPort }}"
+          livenessProbe:
+            httpGet:
+              path: /healthz
+              port: {{ .Values.image.restfulPort }}
+            initialDelaySeconds: 120
+            periodSeconds: 60
+          startupProbe:
+            httpGet:
+              path: /healthz
+              port: {{ .Values.image.restfulPort }}
+            failureThreshold: 5
+            periodSeconds: 60
+          volumeMounts:
+            - mountPath: /dev/dri/card0
+              name: dri
+          securityContext:
+            privileged: true
+          ports:
+            - containerPort: {{ .Values.image.restfulPort }}
+              name: restful
+            - containerPort: {{ .Values.image.grpcPort }}
+              name: grpc
+          volumeMounts:
+            - mountPath: /opt/hce-core/middleware/ai/ai_inference/source/low_latency_server/AiInference.config
+              subPath: AiInference.config
+              name: config-volume
+            - mountPath: /opt/hce-configs/media_storage_configmap.json
+              subPath: media_storage_configmap.json
+              name: config-volume-ms
+            - name: media-storage-secret
+              mountPath: {{ .Values.configMap.mediaStorage.secretMountPath }}
+              readOnly: true
+      volumes:
+        - name: config-volume
+          configMap:
+            name: {{ .Values.configMap.AIInference.name }}
+        - name: dri
+          hostPath:
+            path: /dev/dri/card0
+        - name: config-volume-ms
+          configMap:
+            name: {{ .Values.configMap.mediaStorage.name }}
+        - name: media-storage-secret
+          secret:
+            secretName: {{ .Values.global.secret.minio.name }}
+            optional: true
+            items:
+              - key: rootUser
+                path: rootUser
+              - key: rootPassword
+                path: rootPassword
diff --git a/charts/evi-ai-inference-2.1.0/templates/secret.yaml b/charts/evi-ai-inference-2.1.0/templates/secret.yaml
new file mode 100644
index 0000000..a53f62e
--- /dev/null
+++ b/charts/evi-ai-inference-2.1.0/templates/secret.yaml
@@ -0,0 +1,24 @@
+# INTEL CONFIDENTIAL
+#
+# Copyright (C) 2022 Intel Corporation.
+#
+# This software and the related documents are Intel copyrighted materials, and your use of
+# them is governed by the express license under which they were provided to you (License).
+# Unless the License provides otherwise, you may not use, modify, copy, publish, distribute,
+# disclose or transmit this software or the related documents without Intel's prior written permission.
+#
+# This software and the related documents are provided as is, with no express or implied warranties,
+# other than those that are expressly stated in the License.
+
+---
+apiVersion: v1
+kind: Secret
+metadata:
+  name: {{ .Values.global.secret.minio.name }}
+  namespace: {{ .Values.global.namespace.value }}
+  labels:
+    app: ai-inference
+type: Opaque
+data:
+  rootUser: {{ .Values.configMap.mediaStorage.rootUser }}
+  rootPassword: {{ .Values.configMap.mediaStorage.rootPassword }}
diff --git a/charts/evi-ai-inference-2.1.0/templates/service.yaml b/charts/evi-ai-inference-2.1.0/templates/service.yaml
new file mode 100644
index 0000000..41aa911
--- /dev/null
+++ b/charts/evi-ai-inference-2.1.0/templates/service.yaml
@@ -0,0 +1,34 @@
+# INTEL CONFIDENTIAL
+#
+# Copyright (C) 2022-2023 Intel Corporation.
+#
+# This software and the related documents are Intel copyrighted materials, and your use of
+# them is governed by the express license under which they were provided to you (License).
+# Unless the License provides otherwise, you may not use, modify, copy, publish, distribute,
+# disclose or transmit this software or the related documents without Intel's prior written permission.
+#
+# This software and the related documents are provided as is, with no express or implied warranties,
+# other than those that are expressly stated in the License.
+
+---
+apiVersion: v1
+kind: Service
+metadata:
+  name: {{ .Values.global.service.name }}
+  namespace: {{ .Values.global.namespace.value }}
+  labels:
+    app: ai-inference
+spec:
+  ports:
+    - name: restful
+      port: {{ .Values.image.restfulPort }}
+      protocol: TCP
+      targetPort: {{ .Values.image.restfulPort }}
+    - name: grpc
+      port: {{ .Values.image.grpcPort }}
+      protocol: TCP
+      targetPort: {{ .Values.image.grpcPort }}
+  type: ClusterIP
+  selector:
+    app: ai-inference
+    version: v1
diff --git a/charts/evi-ai-inference-2.1.0/templates/serviceaccount.yaml b/charts/evi-ai-inference-2.1.0/templates/serviceaccount.yaml
new file mode 100644
index 0000000..10b95c1
--- /dev/null
+++ b/charts/evi-ai-inference-2.1.0/templates/serviceaccount.yaml
@@ -0,0 +1,20 @@
+# INTEL CONFIDENTIAL
+#
+# Copyright (C) 2022 Intel Corporation.
+#
+# This software and the related documents are Intel copyrighted materials, and your use of
+# them is governed by the express license under which they were provided to you (License).
+# Unless the License provides otherwise, you may not use, modify, copy, publish, distribute,
+# disclose or transmit this software or the related documents without Intel's prior written permission.
+#
+# This software and the related documents are provided as is, with no express or implied warranties,
+# other than those that are expressly stated in the License.
+
+---
+apiVersion: v1
+kind: ServiceAccount
+metadata:
+  name: {{ .Values.global.service.name }}
+  namespace: {{ .Values.global.namespace.value }}
+  labels:
+    account: ai-inference
diff --git a/charts/evi-ai-inference-2.1.0/values.yaml b/charts/evi-ai-inference-2.1.0/values.yaml
new file mode 100644
index 0000000..7773f77
--- /dev/null
+++ b/charts/evi-ai-inference-2.1.0/values.yaml
@@ -0,0 +1,73 @@
+# INTEL CONFIDENTIAL
+#
+# Copyright (C) 2022-2023 Intel Corporation.
+#
+# This software and the related documents are Intel copyrighted materials, and your use of
+# them is governed by the express license under which they were provided to you (License).
+# Unless the License provides otherwise, you may not use, modify, copy, publish, distribute,
+# disclose or transmit this software or the related documents without Intel's prior written permission.
+#
+# This software and the related documents are provided as is, with no express or implied warranties,
+# other than those that are expressly stated in the License.
+
+---
+# Default values for ai-inference.
+# This is a YAML-formatted file.
+# Declare variables to be passed into your templates.
+replicaCount: 1

+global:
+  namespace:
+    create: false
+    value: hce-ai
+    istioInjection:
+      create: false
+  service:
+    name: evi-ai-inference
+  deployment:
+    name: evi-ai-inference
+  secret:
+    minio:
+      name: evi-minio-secret

+istioInjection: true

+configMap:
+  AIInference:
+    name: ai-inference
+    value: configs/AiInference.config
+    maxConcurrentWorkloadPerReplica: 8
+  hbaseStorage:
+    name: hbase-storage
+    value: configs/hbase_storage_configmap.json
+    hbaseAddr: "my-hbase-hbase-master.dev"
+    hbasePort: 9090
+    storageRestAddress: "storage-rest.storage-rest"
+    storageRestPort: "9900"
+  mediaStorage:
+    name: media-storage
+    value: configs/media_storage_configmap.json
+    rootUser: ""
+    rootPassword: ""
+    secretMountPath: /opt/hce-configs/credentials/minio
+    rootUserMountPath: /opt/hce-configs/credentials/minio/rootUser
+    rootPasswordMountPath: /opt/hce-configs/credentials/minio/rootPassword
+    minIOAddress: "minio-service.minio"
+    minIOPort: "9000"


+image:
+  repository: ai-inference-cpu
+  Version: master-96802044e3c560ce54cdfb2c9d69e25819ef112a
+  pullPolicy: IfNotPresent
+  restfulPort: 50051
+  grpcPort: 50052

+resources:
+  requests:
+    cpu: 10m
+    memory: 300Mi
+  limits:
+    cpu: 120
+    memory: 128Gi
+
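
Usage note (illustrative, not part of the patch): the templates take the target namespace from .Values.global.namespace.value, and templates/secret.yaml writes the MinIO credentials into a Secret data block, so overrides must already be base64-encoded. A minimal sketch, assuming Helm 3, a cluster where the MinIO, HBase and storage-rest endpoints in values.yaml are reachable, and a hypothetical override file named my-values.yaml:

    # my-values.yaml -- illustrative overrides only
    global:
      namespace:
        create: true          # let the chart create the hce-ai namespace
        value: hce-ai
    configMap:
      mediaStorage:
        rootUser: bWluaW9hZG1pbg==        # "minioadmin" base64-encoded, example credential
        rootPassword: bWluaW9hZG1pbg==    # replace with real, base64-encoded values

Rendering the chart locally before installing keeps template errors out of the cluster:

    helm template evi-ai-inference charts/evi-ai-inference-2.1.0 -f my-values.yaml
    helm install evi-ai-inference charts/evi-ai-inference-2.1.0 -f my-values.yaml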