From 4a82380c1dfe6c86802c1c907ac7f58873851517 Mon Sep 17 00:00:00 2001 From: ishaansehgal99 Date: Thu, 31 Oct 2024 10:29:47 -0700 Subject: [PATCH 1/5] chore: Renaming to reflect updated repo --- .../kind-cluster/determine_models.py | 2 +- .../kind-cluster/docker-job-template.yaml | 2 +- .github/workflows/kind-cluster/main.py | 2 +- Makefile | 4 ++-- PROJECT | 4 ++-- README.md | 2 +- api/v1alpha1/params_validation.go | 4 ++-- api/v1alpha1/workspace_validation.go | 6 +++--- api/v1alpha1/workspace_validation_test.go | 8 ++++---- charts/DemoUI/inference/Chart.yaml | 2 +- .../inference/templates/deployment.yaml | 2 +- charts/kaito/ragengine/Chart.yaml | 4 ++-- charts/kaito/workspace/Chart.yaml | 4 ++-- .../workspace/templates/lora-params.yaml | 2 +- .../workspace/templates/qlora-params.yaml | 2 +- cmd/ragengine/main.go | 8 ++++---- cmd/workspace/main.go | 14 ++++++------- cmd/workspace/models.go | 12 +++++------ .../Dockerfile.reference | 2 +- .../custom-model-integration-guide.md | 2 +- docs/inference/README.md | 2 +- docs/proposals/20240527-phi3-instruct.md | 2 +- docs/tuning/README.md | 6 +++--- go.mod | 2 +- pkg/controllers/ragengine_controller.go | 12 +++++------ pkg/controllers/ragengine_controller_test.go | 8 ++++---- pkg/controllers/ragengine_gc_finalizer.go | 10 +++++----- pkg/controllers/ragengine_status.go | 2 +- pkg/controllers/ragengine_status_test.go | 4 ++-- pkg/controllers/workspace_controller.go | 20 +++++++++---------- pkg/controllers/workspace_controller_test.go | 14 ++++++------- pkg/controllers/workspace_gc_finalizer.go | 10 +++++----- .../workspace_gc_finalizer_test.go | 8 ++++---- pkg/controllers/workspace_status.go | 2 +- pkg/controllers/workspace_status_test.go | 4 ++-- pkg/featuregates/featuregates.go | 2 +- pkg/inference/preset-inferences.go | 10 +++++----- pkg/inference/preset-inferences_test.go | 10 +++++----- pkg/inference/template_inference.go | 4 ++-- pkg/inference/template_inference_test.go | 2 +- pkg/machine/machine.go | 6 +++--- pkg/machine/machine_test.go | 2 +- pkg/nodeclaim/nodeclaim.go | 6 +++--- pkg/nodeclaim/nodeclaim_test.go | 6 +++--- pkg/resources/manifests.go | 2 +- pkg/resources/manifests_test.go | 4 ++-- pkg/resources/nodes.go | 2 +- pkg/resources/nodes_test.go | 2 +- pkg/resources/resources_test.go | 2 +- pkg/sku/cloud_sku_handler.go | 2 +- pkg/tuning/preset-tuning.go | 10 +++++----- pkg/tuning/preset-tuning_test.go | 10 +++++----- pkg/utils/common-preset.go | 2 +- pkg/utils/common.go | 4 ++-- pkg/utils/plugin/plugin.go | 2 +- pkg/utils/test/testModel.go | 4 ++-- pkg/utils/test/testUtils.go | 2 +- pkg/webhooks/webhooks.go | 2 +- presets/models/falcon/model.go | 10 +++++----- presets/models/llama2/README.md | 2 +- presets/models/llama2/model.go | 8 ++++---- presets/models/llama2chat/README.md | 2 +- presets/models/llama2chat/model.go | 8 ++++---- presets/models/mistral/model.go | 8 ++++---- presets/models/phi2/model.go | 8 ++++---- presets/models/phi3/model.go | 8 ++++---- presets/test/falcon-benchmark/README.md | 2 +- test/e2e/e2e_test.go | 2 +- test/e2e/inference_with_adapters_test.go | 4 ++-- test/e2e/preset_test.go | 4 ++-- test/e2e/utils/cluster.go | 2 +- test/e2e/utils/machine.go | 2 +- test/e2e/utils/nodeclaim.go | 2 +- test/e2e/utils/utils.go | 2 +- test/e2e/webhook_test.go | 4 ++-- 75 files changed, 185 insertions(+), 185 deletions(-) diff --git a/.github/workflows/kind-cluster/determine_models.py b/.github/workflows/kind-cluster/determine_models.py index 9cbab7d31..6433b4e60 100644 --- a/.github/workflows/kind-cluster/determine_models.py 
+++ b/.github/workflows/kind-cluster/determine_models.py @@ -21,7 +21,7 @@ def read_yaml(file_path): YAML_PR = read_yaml(supp_models_yaml) # Format: {falcon-7b : {model_name:falcon-7b, type:text-generation, version: #, tag: #}} MODELS = {model['name']: model for model in YAML_PR['models']} -KAITO_REPO_URL = "https://github.com/Azure/kaito.git" +KAITO_REPO_URL = "https://github.com/kaito-project/kaito.git" def set_multiline_output(name, value): with open(os.environ['GITHUB_OUTPUT'], 'a') as fh: diff --git a/.github/workflows/kind-cluster/docker-job-template.yaml b/.github/workflows/kind-cluster/docker-job-template.yaml index 7334057f8..524b9a8ea 100644 --- a/.github/workflows/kind-cluster/docker-job-template.yaml +++ b/.github/workflows/kind-cluster/docker-job-template.yaml @@ -27,7 +27,7 @@ spec: # Additional setup apk add --no-cache git - git clone -b $PR_BRANCH https://github.com/Azure/kaito.git + git clone -b $PR_BRANCH https://github.com/kaito-project/kaito.git # Print Project Files echo 'Print Project Files' diff --git a/.github/workflows/kind-cluster/main.py b/.github/workflows/kind-cluster/main.py index 46ca38552..6d2713cf0 100644 --- a/.github/workflows/kind-cluster/main.py +++ b/.github/workflows/kind-cluster/main.py @@ -6,7 +6,7 @@ import time from pathlib import Path -KAITO_REPO_URL = "https://github.com/Azure/kaito.git" +KAITO_REPO_URL = "https://github.com/kaito-project/kaito.git" WEIGHTS_FOLDER = os.environ.get("WEIGHTS_DIR", None) def get_weights_path(model_name): diff --git a/Makefile b/Makefile index 31b345046..f09dc53c3 100644 --- a/Makefile +++ b/Makefile @@ -243,8 +243,8 @@ docker-build-dataset: docker-buildx .PHONY: docker-build-llm-reference-preset docker-build-llm-reference-preset: docker-buildx docker buildx build \ - -t ghcr.io/azure/kaito/llm-reference-preset:$(VERSION) \ - -t ghcr.io/azure/kaito/llm-reference-preset:latest \ + -t ghcr.io/kaito-project/kaito/llm-reference-preset:$(VERSION) \ + -t ghcr.io/kaito-project/kaito/llm-reference-preset:latest \ -f docs/custom-model-integration/Dockerfile.reference \ --build-arg MODEL_TYPE=text-generation \ --build-arg VERSION=$(VERSION) .
diff --git a/PROJECT b/PROJECT index 8e08ac818..d604b4b2e 100644 --- a/PROJECT +++ b/PROJECT @@ -6,7 +6,7 @@ domain: io layout: - go.kubebuilder.io/v4 projectName: kaito -repo: github.com/azure/kaito +repo: github.com/kaito-project/kaito resources: - api: crdVersion: v1 @@ -15,6 +15,6 @@ resources: domain: io group: kaito kind: Workspace - path: github.com/azure/kaito/api/v1alpha1 + path: github.com/kaito-project/kaito/api/v1alpha1 version: v1alpha1 version: "3" diff --git a/README.md b/README.md index 3f16e8730..d44219747 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,7 @@ # Kubernetes AI Toolchain Operator (Kaito) ![GitHub Release](https://img.shields.io/github/v/release/Azure/kaito) -[![Go Report Card](https://goreportcard.com/badge/github.com/Azure/kaito)](https://goreportcard.com/report/github.com/Azure/kaito) +[![Go Report Card](https://goreportcard.com/badge/github.com/kaito-project/kaito)](https://goreportcard.com/report/github.com/kaito-project/kaito) ![GitHub go.mod Go version](https://img.shields.io/github/go-mod/go-version/Azure/kaito) [![codecov](https://codecov.io/gh/Azure/kaito/graph/badge.svg?token=XAQLLPB2AR)](https://codecov.io/gh/Azure/kaito) diff --git a/api/v1alpha1/params_validation.go b/api/v1alpha1/params_validation.go index e4b034e9a..e3a0e6a88 100644 --- a/api/v1alpha1/params_validation.go +++ b/api/v1alpha1/params_validation.go @@ -10,8 +10,8 @@ import ( "reflect" "strings" - "github.com/azure/kaito/pkg/k8sclient" - "github.com/azure/kaito/pkg/utils" + "github.com/kaito-project/kaito/pkg/k8sclient" + "github.com/kaito-project/kaito/pkg/utils" "gopkg.in/yaml.v2" corev1 "k8s.io/api/core/v1" "k8s.io/apimachinery/pkg/api/errors" diff --git a/api/v1alpha1/workspace_validation.go b/api/v1alpha1/workspace_validation.go index 07e45c9b7..9a4183c69 100644 --- a/api/v1alpha1/workspace_validation.go +++ b/api/v1alpha1/workspace_validation.go @@ -12,10 +12,10 @@ import ( "strconv" "strings" - "github.com/azure/kaito/pkg/utils/consts" + "github.com/kaito-project/kaito/pkg/utils/consts" - "github.com/azure/kaito/pkg/utils" - "github.com/azure/kaito/pkg/utils/plugin" + "github.com/kaito-project/kaito/pkg/utils" + "github.com/kaito-project/kaito/pkg/utils/plugin" admissionregistrationv1 "k8s.io/api/admissionregistration/v1" "k8s.io/apimachinery/pkg/api/resource" diff --git a/api/v1alpha1/workspace_validation_test.go b/api/v1alpha1/workspace_validation_test.go index bf9f8ea3e..63eefd483 100644 --- a/api/v1alpha1/workspace_validation_test.go +++ b/api/v1alpha1/workspace_validation_test.go @@ -10,12 +10,12 @@ import ( "strings" "testing" - "github.com/azure/kaito/pkg/k8sclient" - "github.com/azure/kaito/pkg/utils/consts" - "github.com/azure/kaito/pkg/utils/plugin" + "github.com/kaito-project/kaito/pkg/k8sclient" + "github.com/kaito-project/kaito/pkg/utils/consts" + "github.com/kaito-project/kaito/pkg/utils/plugin" "k8s.io/apimachinery/pkg/runtime" - "github.com/azure/kaito/pkg/model" + "github.com/kaito-project/kaito/pkg/model" v1 "k8s.io/api/core/v1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "sigs.k8s.io/controller-runtime/pkg/client/fake" diff --git a/charts/DemoUI/inference/Chart.yaml b/charts/DemoUI/inference/Chart.yaml index 8b6bb96bf..3c3ff715b 100644 --- a/charts/DemoUI/inference/Chart.yaml +++ b/charts/DemoUI/inference/Chart.yaml @@ -5,7 +5,7 @@ type: application version: 0.1.0 appVersion: "0.1.0" sources: - - https://github.com/Azure/kaito + - https://github.com/kaito-project/kaito maintainers: - name: ishaansehgal99 email: ishaanforthewin@gmail.com diff --git 
a/charts/DemoUI/inference/templates/deployment.yaml b/charts/DemoUI/inference/templates/deployment.yaml index 143f2a887..71c50d383 100644 --- a/charts/DemoUI/inference/templates/deployment.yaml +++ b/charts/DemoUI/inference/templates/deployment.yaml @@ -39,7 +39,7 @@ spec: - | mkdir -p /app/frontend && \ pip install chainlit requests && \ - wget -O /app/frontend/inference.py https://raw.githubusercontent.com/Azure/kaito/main/demo/inferenceUI/chainlit.py && \ + wget -O /app/frontend/inference.py https://raw.githubusercontent.com/kaito-project/kaito/main/demo/inferenceUI/chainlit.py && \ chainlit run frontend/inference.py -w env: - name: WORKSPACE_SERVICE_URL diff --git a/charts/kaito/ragengine/Chart.yaml b/charts/kaito/ragengine/Chart.yaml index ceabb1f53..eb944a780 100644 --- a/charts/kaito/ragengine/Chart.yaml +++ b/charts/kaito/ragengine/Chart.yaml @@ -13,9 +13,9 @@ version: 0.0.1 # follow Semantic Versioning. They should reflect the version the application is using. # It is recommended to use it with quotes. appVersion: 0.0.1 -home: https://github.com/Azure/kaito +home: https://github.com/kaito-project/kaito sources: - - https://github.com/Azure/kaito + - https://github.com/kaito-project/kaito maintainers: - name: Fei-Guo email: vrgf2003@gmail.com diff --git a/charts/kaito/workspace/Chart.yaml b/charts/kaito/workspace/Chart.yaml index 282e4aa04..45ee9f6df 100644 --- a/charts/kaito/workspace/Chart.yaml +++ b/charts/kaito/workspace/Chart.yaml @@ -13,9 +13,9 @@ version: 0.3.1 # follow Semantic Versioning. They should reflect the version the application is using. # It is recommended to use it with quotes. appVersion: 0.3.1 -home: https://github.com/Azure/kaito +home: https://github.com/kaito-project/kaito sources: - - https://github.com/Azure/kaito + - https://github.com/kaito-project/kaito maintainers: - name: Fei-Guo email: vrgf2003@gmail.com diff --git a/charts/kaito/workspace/templates/lora-params.yaml b/charts/kaito/workspace/templates/lora-params.yaml index 91daa8366..d6a764c3b 100644 --- a/charts/kaito/workspace/templates/lora-params.yaml +++ b/charts/kaito/workspace/templates/lora-params.yaml @@ -29,7 +29,7 @@ data: DataCollator: # Configurable Parameters: https://huggingface.co/docs/transformers/v4.40.2/en/main_classes/data_collator#transformers.DataCollatorForLanguageModeling mlm: true # Default setting; included to show DataCollator can be updated. - DatasetConfig: # Configurable Parameters: https://github.com/Azure/kaito/blob/main/presets/tuning/text-generation/cli.py#L44 + DatasetConfig: # Configurable Parameters: https://github.com/kaito-project/kaito/blob/main/presets/tuning/text-generation/cli.py#L44 shuffle_dataset: true train_test_split: 1 # Default to using all data for fine-tuning due to strong pre-trained baseline and typically limited fine-tuning data # Expected Dataset format: diff --git a/charts/kaito/workspace/templates/qlora-params.yaml b/charts/kaito/workspace/templates/qlora-params.yaml index d79c4e723..88bf19da7 100644 --- a/charts/kaito/workspace/templates/qlora-params.yaml +++ b/charts/kaito/workspace/templates/qlora-params.yaml @@ -32,7 +32,7 @@ data: DataCollator: # Configurable Parameters: https://huggingface.co/docs/transformers/v4.40.2/en/main_classes/data_collator#transformers.DataCollatorForLanguageModeling mlm: true # Default setting; included to show DataCollator can be updated. 
- DatasetConfig: # Configurable Parameters: https://github.com/Azure/kaito/blob/main/presets/tuning/text-generation/cli.py#L44 + DatasetConfig: # Configurable Parameters: https://github.com/kaito-project/kaito/blob/main/presets/tuning/text-generation/cli.py#L44 shuffle_dataset: true train_test_split: 1 # Default to using all data for fine-tuning due to strong pre-trained baseline and typically limited fine-tuning data # Expected Dataset format: diff --git a/cmd/ragengine/main.go b/cmd/ragengine/main.go index 84017aec2..ce3594c91 100644 --- a/cmd/ragengine/main.go +++ b/cmd/ragengine/main.go @@ -14,11 +14,11 @@ import ( azurev1alpha2 "github.com/Azure/karpenter-provider-azure/pkg/apis/v1alpha2" "github.com/aws/karpenter-core/pkg/apis/v1alpha5" awsv1beta1 "github.com/aws/karpenter-provider-aws/pkg/apis/v1beta1" - "github.com/azure/kaito/pkg/k8sclient" + "github.com/kaito-project/kaito/pkg/k8sclient" metricsserver "sigs.k8s.io/controller-runtime/pkg/metrics/server" - "github.com/azure/kaito/pkg/controllers" - "github.com/azure/kaito/pkg/webhooks" + "github.com/kaito-project/kaito/pkg/controllers" + "github.com/kaito-project/kaito/pkg/webhooks" "k8s.io/api/apps/v1beta1" "k8s.io/klog/v2" "knative.dev/pkg/injection/sharedmain" @@ -36,7 +36,7 @@ import ( "sigs.k8s.io/controller-runtime/pkg/healthz" "sigs.k8s.io/controller-runtime/pkg/log/zap" - kaitov1alpha1 "github.com/azure/kaito/api/v1alpha1" + kaitov1alpha1 "github.com/kaito-project/kaito/api/v1alpha1" //+kubebuilder:scaffold:imports ) diff --git a/cmd/workspace/main.go b/cmd/workspace/main.go index ce4fbee43..1db35bf6d 100644 --- a/cmd/workspace/main.go +++ b/cmd/workspace/main.go @@ -13,16 +13,16 @@ import ( azurev1alpha2 "github.com/Azure/karpenter-provider-azure/pkg/apis/v1alpha2" awsv1beta1 "github.com/aws/karpenter-provider-aws/pkg/apis/v1beta1" - "github.com/azure/kaito/pkg/featuregates" - "github.com/azure/kaito/pkg/k8sclient" - "github.com/azure/kaito/pkg/nodeclaim" - "github.com/azure/kaito/pkg/utils/consts" + "github.com/kaito-project/kaito/pkg/featuregates" + "github.com/kaito-project/kaito/pkg/k8sclient" + "github.com/kaito-project/kaito/pkg/nodeclaim" + "github.com/kaito-project/kaito/pkg/utils/consts" metricsserver "sigs.k8s.io/controller-runtime/pkg/metrics/server" "sigs.k8s.io/karpenter/pkg/apis/v1beta1" "github.com/aws/karpenter-core/pkg/apis/v1alpha5" - "github.com/azure/kaito/pkg/controllers" - "github.com/azure/kaito/pkg/webhooks" + "github.com/kaito-project/kaito/pkg/controllers" + "github.com/kaito-project/kaito/pkg/webhooks" "k8s.io/klog/v2" "knative.dev/pkg/injection/sharedmain" "knative.dev/pkg/webhook" @@ -39,7 +39,7 @@ import ( "sigs.k8s.io/controller-runtime/pkg/healthz" "sigs.k8s.io/controller-runtime/pkg/log/zap" - kaitov1alpha1 "github.com/azure/kaito/api/v1alpha1" + kaitov1alpha1 "github.com/kaito-project/kaito/api/v1alpha1" //+kubebuilder:scaffold:imports ) diff --git a/cmd/workspace/models.go b/cmd/workspace/models.go index a5ec84624..0ca68c1b0 100644 --- a/cmd/workspace/models.go +++ b/cmd/workspace/models.go @@ -3,10 +3,10 @@ package main import ( - _ "github.com/azure/kaito/presets/models/falcon" - _ "github.com/azure/kaito/presets/models/llama2" - _ "github.com/azure/kaito/presets/models/llama2chat" - _ "github.com/azure/kaito/presets/models/mistral" - _ "github.com/azure/kaito/presets/models/phi2" - _ "github.com/azure/kaito/presets/models/phi3" + _ "github.com/kaito-project/kaito/presets/models/falcon" + _ "github.com/kaito-project/kaito/presets/models/llama2" + _ 
"github.com/kaito-project/kaito/presets/models/llama2chat" + _ "github.com/kaito-project/kaito/presets/models/mistral" + _ "github.com/kaito-project/kaito/presets/models/phi2" + _ "github.com/kaito-project/kaito/presets/models/phi3" ) diff --git a/docs/custom-model-integration/Dockerfile.reference b/docs/custom-model-integration/Dockerfile.reference index 0478342f7..5c7b0a355 100644 --- a/docs/custom-model-integration/Dockerfile.reference +++ b/docs/custom-model-integration/Dockerfile.reference @@ -1,7 +1,7 @@ FROM python:3.10-slim@sha256:684b1aaf96a7942b3c3af438d162e0baa3510aa7af25ad76d238e0c746bdec79 # Specify the repository source URL for reference and access in Kaito packages. -LABEL org.opencontainers.image.source=https://github.com/azure/kaito +LABEL org.opencontainers.image.source=https://github.com/kaito-project/kaito ARG MODEL_TYPE ARG VERSION diff --git a/docs/custom-model-integration/custom-model-integration-guide.md b/docs/custom-model-integration/custom-model-integration-guide.md index 57737ac7e..ea4bab468 100644 --- a/docs/custom-model-integration/custom-model-integration-guide.md +++ b/docs/custom-model-integration/custom-model-integration-guide.md @@ -12,7 +12,7 @@ If you want to avoid building a Docker image with model weights, use our pre-bui ### Step 1: Clone the Repository ```sh -git clone https://github.com/Azure/kaito.git +git clone https://github.com/kaito-project/kaito.git ``` ### Step 2: Download Your Private/Custom Model Weights diff --git a/docs/inference/README.md b/docs/inference/README.md index 68259fdbf..3c6d34d8e 100644 --- a/docs/inference/README.md +++ b/docs/inference/README.md @@ -67,7 +67,7 @@ Currently, only images are supported as adapter sources. The `strength` field sp **Note:** When building a container image for an existing adapter, ensure all adapter files are copied to the **/data** directory inside the container. -For detailed `InferenceSpec` API definitions, refer to the [documentation](https://github.com/Azure/kaito/blob/2ccc93daf9d5385649f3f219ff131ee7c9c47f3e/api/v1alpha1/workspace_types.go#L75). +For detailed `InferenceSpec` API definitions, refer to the [documentation](https://github.com/kaito-project/kaito/blob/2ccc93daf9d5385649f3f219ff131ee7c9c47f3e/api/v1alpha1/workspace_types.go#L75). # Inference workload diff --git a/docs/proposals/20240527-phi3-instruct.md b/docs/proposals/20240527-phi3-instruct.md index 114b18613..0a7efe6a5 100644 --- a/docs/proposals/20240527-phi3-instruct.md +++ b/docs/proposals/20240527-phi3-instruct.md @@ -50,4 +50,4 @@ This section describes how to configure the runtime framework to support the inf # History - [x] 05/27/2024: Open proposal PR. -- [x] 06/13/2024: Phi-3 Mini Merged [#469](https://github.com/Azure/kaito/pull/469) +- [x] 06/13/2024: Phi-3 Mini Merged [#469](https://github.com/kaito-project/kaito/pull/469) diff --git a/docs/tuning/README.md b/docs/tuning/README.md index ff48e11d0..4613c5164 100644 --- a/docs/tuning/README.md +++ b/docs/tuning/README.md @@ -35,7 +35,7 @@ tuning: ``` -The detailed `TuningSpec` API definitions can be found [here](https://github.com/Azure/kaito/blob/2ccc93daf9d5385649f3f219ff131ee7c9c47f3e/api/v1alpha1/workspace_types.go#L145). +The detailed `TuningSpec` API definitions can be found [here](https://github.com/kaito-project/kaito/blob/2ccc93daf9d5385649f3f219ff131ee7c9c47f3e/api/v1alpha1/workspace_types.go#L145). ### Tuning configurations Kaito provides default tuning configurations for different tuning methods. They are managed by Kubernetes configmaps. 
@@ -73,7 +73,7 @@ TrainingArguments([full list](https://huggingface.co/docs/transformers/v4.40.2/e DataCollator([full list](https://huggingface.co/docs/transformers/v4.40.2/en/main_classes/data_collator#transformers.DataCollatorForLanguageModeling)) - mlm: Masked language modeling flag. -DatasetConfig([full list](https://github.com/Azure/kaito/blob/main/presets/tuning/text-generation/cli.py#L44)) +DatasetConfig([full list](https://github.com/kaito-project/kaito/blob/main/presets/tuning/text-generation/cli.py#L44)) - shuffle_dataset: Whether to shuffle the dataset. - train_test_split: Proportion of data used for training, typically set to 1 for using all data. @@ -118,7 +118,7 @@ Figure 1. Kaito tuning pod structure. - Sidecar container: It is introduced to support automatically pushing the tuning results to a container registry. This container, with `docker` installed, runs a script to periodically check the training progress. Once the training is done, indicated by a sentinel file created by the training process, the script builds a container image containing the training results and pushes the image to the specified container registry. -- Main container: It uses one of the supported model images. The image entry launches the [fine\_tuning.py](https://github.com/Azure/kaito/blob/main/presets/tuning/text-generation/fine_tuning.py) script. +- Main container: It uses one of the supported model images. The image entry launches the [fine\_tuning.py](https://github.com/kaito-project/kaito/blob/main/presets/tuning/text-generation/fine_tuning.py) script. All three containers use shared local volumes (by mounting the same `EmptyDir` volumes), hence file copies between containers are avoided. diff --git a/go.mod b/go.mod index 6b9b843bb..0f24dd16b 100644 --- a/go.mod +++ b/go.mod @@ -1,4 +1,4 @@ -module github.com/azure/kaito +module github.com/kaito-project/kaito go 1.22.3 diff --git a/pkg/controllers/ragengine_controller.go b/pkg/controllers/ragengine_controller.go index 9cc873d4c..87a1f7c00 100644 --- a/pkg/controllers/ragengine_controller.go +++ b/pkg/controllers/ragengine_controller.go @@ -10,12 +10,12 @@ import ( "time" "github.com/aws/karpenter-core/pkg/apis/v1alpha5" - kaitov1alpha1 "github.com/azure/kaito/api/v1alpha1" - "github.com/azure/kaito/pkg/featuregates" - "github.com/azure/kaito/pkg/machine" - "github.com/azure/kaito/pkg/nodeclaim" - "github.com/azure/kaito/pkg/resources" - "github.com/azure/kaito/pkg/utils/consts" + kaitov1alpha1 "github.com/kaito-project/kaito/api/v1alpha1" + "github.com/kaito-project/kaito/pkg/featuregates" + "github.com/kaito-project/kaito/pkg/machine" + "github.com/kaito-project/kaito/pkg/nodeclaim" + "github.com/kaito-project/kaito/pkg/resources" + "github.com/kaito-project/kaito/pkg/utils/consts" "github.com/go-logr/logr" "github.com/samber/lo" corev1 "k8s.io/api/core/v1" diff --git a/pkg/controllers/ragengine_controller_test.go b/pkg/controllers/ragengine_controller_test.go index 80bcc4cd7..4c34c994a 100644 --- a/pkg/controllers/ragengine_controller_test.go +++ b/pkg/controllers/ragengine_controller_test.go @@ -13,10 +13,10 @@ import ( azurev1alpha2 "github.com/Azure/karpenter-provider-azure/pkg/apis/v1alpha2" "github.com/aws/karpenter-core/pkg/apis/v1alpha5" awsv1beta1 "github.com/aws/karpenter-provider-aws/pkg/apis/v1beta1" - "github.com/azure/kaito/api/v1alpha1" - "github.com/azure/kaito/pkg/featuregates" - "github.com/azure/kaito/pkg/utils/consts" - "github.com/azure/kaito/pkg/utils/test" + "github.com/kaito-project/kaito/api/v1alpha1" + 
"github.com/kaito-project/kaito/pkg/featuregates" + "github.com/kaito-project/kaito/pkg/utils/consts" + "github.com/kaito-project/kaito/pkg/utils/test" "github.com/stretchr/testify/mock" "gotest.tools/assert" corev1 "k8s.io/api/core/v1" diff --git a/pkg/controllers/ragengine_gc_finalizer.go b/pkg/controllers/ragengine_gc_finalizer.go index 561629556..c8e729f89 100644 --- a/pkg/controllers/ragengine_gc_finalizer.go +++ b/pkg/controllers/ragengine_gc_finalizer.go @@ -6,11 +6,11 @@ package controllers import ( "context" - kaitov1alpha1 "github.com/azure/kaito/api/v1alpha1" - "github.com/azure/kaito/pkg/featuregates" - "github.com/azure/kaito/pkg/machine" - "github.com/azure/kaito/pkg/nodeclaim" - "github.com/azure/kaito/pkg/utils/consts" + kaitov1alpha1 "github.com/kaito-project/kaito/api/v1alpha1" + "github.com/kaito-project/kaito/pkg/featuregates" + "github.com/kaito-project/kaito/pkg/machine" + "github.com/kaito-project/kaito/pkg/nodeclaim" + "github.com/kaito-project/kaito/pkg/utils/consts" "k8s.io/klog/v2" ctrl "sigs.k8s.io/controller-runtime" "sigs.k8s.io/controller-runtime/pkg/client" diff --git a/pkg/controllers/ragengine_status.go b/pkg/controllers/ragengine_status.go index 2db7e8c1f..1a4c4d9c1 100644 --- a/pkg/controllers/ragengine_status.go +++ b/pkg/controllers/ragengine_status.go @@ -8,7 +8,7 @@ import ( "reflect" "sort" - kaitov1alpha1 "github.com/azure/kaito/api/v1alpha1" + kaitov1alpha1 "github.com/kaito-project/kaito/api/v1alpha1" "github.com/samber/lo" corev1 "k8s.io/api/core/v1" "k8s.io/apimachinery/pkg/api/errors" diff --git a/pkg/controllers/ragengine_status_test.go b/pkg/controllers/ragengine_status_test.go index 00a7bc447..084aa5426 100644 --- a/pkg/controllers/ragengine_status_test.go +++ b/pkg/controllers/ragengine_status_test.go @@ -8,8 +8,8 @@ import ( "errors" "testing" - kaitov1alpha1 "github.com/azure/kaito/api/v1alpha1" - "github.com/azure/kaito/pkg/utils/test" + kaitov1alpha1 "github.com/kaito-project/kaito/api/v1alpha1" + "github.com/kaito-project/kaito/pkg/utils/test" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/mock" apierrors "k8s.io/apimachinery/pkg/api/errors" diff --git a/pkg/controllers/workspace_controller.go b/pkg/controllers/workspace_controller.go index 77a7d3f5c..1049b7345 100644 --- a/pkg/controllers/workspace_controller.go +++ b/pkg/controllers/workspace_controller.go @@ -16,20 +16,20 @@ import ( "sigs.k8s.io/controller-runtime/pkg/controller/controllerutil" - "github.com/azure/kaito/pkg/featuregates" - "github.com/azure/kaito/pkg/nodeclaim" - "github.com/azure/kaito/pkg/tuning" - "github.com/azure/kaito/pkg/utils/consts" + "github.com/kaito-project/kaito/pkg/featuregates" + "github.com/kaito-project/kaito/pkg/nodeclaim" + "github.com/kaito-project/kaito/pkg/tuning" + "github.com/kaito-project/kaito/pkg/utils/consts" batchv1 "k8s.io/api/batch/v1" "sigs.k8s.io/karpenter/pkg/apis/v1beta1" "github.com/aws/karpenter-core/pkg/apis/v1alpha5" - kaitov1alpha1 "github.com/azure/kaito/api/v1alpha1" - "github.com/azure/kaito/pkg/inference" - "github.com/azure/kaito/pkg/machine" - "github.com/azure/kaito/pkg/resources" - "github.com/azure/kaito/pkg/utils" - "github.com/azure/kaito/pkg/utils/plugin" + kaitov1alpha1 "github.com/kaito-project/kaito/api/v1alpha1" + "github.com/kaito-project/kaito/pkg/inference" + "github.com/kaito-project/kaito/pkg/machine" + "github.com/kaito-project/kaito/pkg/resources" + "github.com/kaito-project/kaito/pkg/utils" + "github.com/kaito-project/kaito/pkg/utils/plugin" "github.com/go-logr/logr" 
"github.com/samber/lo" appsv1 "k8s.io/api/apps/v1" diff --git a/pkg/controllers/workspace_controller_test.go b/pkg/controllers/workspace_controller_test.go index fe5b0a55a..cc09d1c5a 100644 --- a/pkg/controllers/workspace_controller_test.go +++ b/pkg/controllers/workspace_controller_test.go @@ -17,13 +17,13 @@ import ( azurev1alpha2 "github.com/Azure/karpenter-provider-azure/pkg/apis/v1alpha2" "github.com/aws/karpenter-core/pkg/apis/v1alpha5" awsv1beta1 "github.com/aws/karpenter-provider-aws/pkg/apis/v1beta1" - "github.com/azure/kaito/api/v1alpha1" - kaitov1alpha1 "github.com/azure/kaito/api/v1alpha1" - "github.com/azure/kaito/pkg/featuregates" - "github.com/azure/kaito/pkg/machine" - "github.com/azure/kaito/pkg/nodeclaim" - "github.com/azure/kaito/pkg/utils/consts" - "github.com/azure/kaito/pkg/utils/test" + "github.com/kaito-project/kaito/api/v1alpha1" + kaitov1alpha1 "github.com/kaito-project/kaito/api/v1alpha1" + "github.com/kaito-project/kaito/pkg/featuregates" + "github.com/kaito-project/kaito/pkg/machine" + "github.com/kaito-project/kaito/pkg/nodeclaim" + "github.com/kaito-project/kaito/pkg/utils/consts" + "github.com/kaito-project/kaito/pkg/utils/test" "github.com/stretchr/testify/mock" "gotest.tools/assert" appsv1 "k8s.io/api/apps/v1" diff --git a/pkg/controllers/workspace_gc_finalizer.go b/pkg/controllers/workspace_gc_finalizer.go index 96f47843f..61803ec54 100644 --- a/pkg/controllers/workspace_gc_finalizer.go +++ b/pkg/controllers/workspace_gc_finalizer.go @@ -6,11 +6,11 @@ package controllers import ( "context" - kaitov1alpha1 "github.com/azure/kaito/api/v1alpha1" - "github.com/azure/kaito/pkg/featuregates" - "github.com/azure/kaito/pkg/machine" - "github.com/azure/kaito/pkg/nodeclaim" - "github.com/azure/kaito/pkg/utils/consts" + kaitov1alpha1 "github.com/kaito-project/kaito/api/v1alpha1" + "github.com/kaito-project/kaito/pkg/featuregates" + "github.com/kaito-project/kaito/pkg/machine" + "github.com/kaito-project/kaito/pkg/nodeclaim" + "github.com/kaito-project/kaito/pkg/utils/consts" "k8s.io/klog/v2" ctrl "sigs.k8s.io/controller-runtime" "sigs.k8s.io/controller-runtime/pkg/client" diff --git a/pkg/controllers/workspace_gc_finalizer_test.go b/pkg/controllers/workspace_gc_finalizer_test.go index 1824082f7..04a8f8bcf 100644 --- a/pkg/controllers/workspace_gc_finalizer_test.go +++ b/pkg/controllers/workspace_gc_finalizer_test.go @@ -6,10 +6,10 @@ import ( "testing" "github.com/aws/karpenter-core/pkg/apis/v1alpha5" - "github.com/azure/kaito/api/v1alpha1" - "github.com/azure/kaito/pkg/featuregates" - "github.com/azure/kaito/pkg/utils/consts" - "github.com/azure/kaito/pkg/utils/test" + "github.com/kaito-project/kaito/api/v1alpha1" + "github.com/kaito-project/kaito/pkg/featuregates" + "github.com/kaito-project/kaito/pkg/utils/consts" + "github.com/kaito-project/kaito/pkg/utils/test" "github.com/stretchr/testify/mock" "gotest.tools/assert" "sigs.k8s.io/controller-runtime/pkg/client" diff --git a/pkg/controllers/workspace_status.go b/pkg/controllers/workspace_status.go index b972ceaa9..8457094fd 100644 --- a/pkg/controllers/workspace_status.go +++ b/pkg/controllers/workspace_status.go @@ -8,7 +8,7 @@ import ( "reflect" "sort" - kaitov1alpha1 "github.com/azure/kaito/api/v1alpha1" + kaitov1alpha1 "github.com/kaito-project/kaito/api/v1alpha1" "github.com/samber/lo" corev1 "k8s.io/api/core/v1" "k8s.io/apimachinery/pkg/api/errors" diff --git a/pkg/controllers/workspace_status_test.go b/pkg/controllers/workspace_status_test.go index d90d2b603..170e69b54 100644 --- 
a/pkg/controllers/workspace_status_test.go +++ b/pkg/controllers/workspace_status_test.go @@ -8,8 +8,8 @@ import ( "errors" "testing" - kaitov1alpha1 "github.com/azure/kaito/api/v1alpha1" - "github.com/azure/kaito/pkg/utils/test" + kaitov1alpha1 "github.com/kaito-project/kaito/api/v1alpha1" + "github.com/kaito-project/kaito/pkg/utils/test" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/mock" apierrors "k8s.io/apimachinery/pkg/api/errors" diff --git a/pkg/featuregates/featuregates.go b/pkg/featuregates/featuregates.go index dd9f67957..2161210e1 100644 --- a/pkg/featuregates/featuregates.go +++ b/pkg/featuregates/featuregates.go @@ -7,7 +7,7 @@ import ( "errors" "fmt" - "github.com/azure/kaito/pkg/utils/consts" + "github.com/kaito-project/kaito/pkg/utils/consts" cliflag "k8s.io/component-base/cli/flag" ) diff --git a/pkg/inference/preset-inferences.go b/pkg/inference/preset-inferences.go index 9dc6da889..6bd175fb4 100644 --- a/pkg/inference/preset-inferences.go +++ b/pkg/inference/preset-inferences.go @@ -8,12 +8,12 @@ import ( "os" "strconv" - "github.com/azure/kaito/pkg/utils" - "github.com/azure/kaito/pkg/utils/consts" + "github.com/kaito-project/kaito/pkg/utils" + "github.com/kaito-project/kaito/pkg/utils/consts" - kaitov1alpha1 "github.com/azure/kaito/api/v1alpha1" - "github.com/azure/kaito/pkg/model" - "github.com/azure/kaito/pkg/resources" + kaitov1alpha1 "github.com/kaito-project/kaito/api/v1alpha1" + "github.com/kaito-project/kaito/pkg/model" + "github.com/kaito-project/kaito/pkg/resources" corev1 "k8s.io/api/core/v1" "k8s.io/apimachinery/pkg/api/resource" "k8s.io/apimachinery/pkg/util/intstr" diff --git a/pkg/inference/preset-inferences_test.go b/pkg/inference/preset-inferences_test.go index f643b8a67..0a532ad20 100644 --- a/pkg/inference/preset-inferences_test.go +++ b/pkg/inference/preset-inferences_test.go @@ -10,13 +10,13 @@ import ( "strings" "testing" - "github.com/azure/kaito/pkg/utils/consts" + "github.com/kaito-project/kaito/pkg/utils/consts" - "github.com/azure/kaito/api/v1alpha1" - "github.com/azure/kaito/pkg/utils/test" + "github.com/kaito-project/kaito/api/v1alpha1" + "github.com/kaito-project/kaito/pkg/utils/test" - "github.com/azure/kaito/pkg/model" - "github.com/azure/kaito/pkg/utils/plugin" + "github.com/kaito-project/kaito/pkg/model" + "github.com/kaito-project/kaito/pkg/utils/plugin" "github.com/stretchr/testify/mock" appsv1 "k8s.io/api/apps/v1" corev1 "k8s.io/api/core/v1" diff --git a/pkg/inference/template_inference.go b/pkg/inference/template_inference.go index 68c665a1b..c40a79f48 100644 --- a/pkg/inference/template_inference.go +++ b/pkg/inference/template_inference.go @@ -5,8 +5,8 @@ package inference import ( "context" - kaitov1alpha1 "github.com/azure/kaito/api/v1alpha1" - "github.com/azure/kaito/pkg/resources" + kaitov1alpha1 "github.com/kaito-project/kaito/api/v1alpha1" + "github.com/kaito-project/kaito/pkg/resources" "sigs.k8s.io/controller-runtime/pkg/client" ) diff --git a/pkg/inference/template_inference_test.go b/pkg/inference/template_inference_test.go index 3cad38db2..ffa93e4cb 100644 --- a/pkg/inference/template_inference_test.go +++ b/pkg/inference/template_inference_test.go @@ -5,7 +5,7 @@ package inference import ( "context" "errors" - "github.com/azure/kaito/pkg/utils/test" + "github.com/kaito-project/kaito/pkg/utils/test" "testing" "github.com/stretchr/testify/mock" diff --git a/pkg/machine/machine.go b/pkg/machine/machine.go index b996d9a82..30a62ef35 100644 --- a/pkg/machine/machine.go +++ b/pkg/machine/machine.go @@ 
-11,9 +11,9 @@ import ( "time" "github.com/aws/karpenter-core/pkg/apis/v1alpha5" - kaitov1alpha1 "github.com/azure/kaito/api/v1alpha1" - "github.com/azure/kaito/pkg/resources" - "github.com/azure/kaito/pkg/utils/consts" + kaitov1alpha1 "github.com/kaito-project/kaito/api/v1alpha1" + "github.com/kaito-project/kaito/pkg/resources" + "github.com/kaito-project/kaito/pkg/utils/consts" "github.com/samber/lo" v1 "k8s.io/api/core/v1" "k8s.io/apimachinery/pkg/api/resource" diff --git a/pkg/machine/machine_test.go b/pkg/machine/machine_test.go index 030eec07f..694331d1c 100644 --- a/pkg/machine/machine_test.go +++ b/pkg/machine/machine_test.go @@ -7,7 +7,7 @@ import ( "errors" "testing" - "github.com/azure/kaito/pkg/utils/test" + "github.com/kaito-project/kaito/pkg/utils/test" "github.com/aws/karpenter-core/pkg/apis/v1alpha5" "github.com/stretchr/testify/mock" diff --git a/pkg/nodeclaim/nodeclaim.go b/pkg/nodeclaim/nodeclaim.go index 9d0cd60c9..78b8f6427 100644 --- a/pkg/nodeclaim/nodeclaim.go +++ b/pkg/nodeclaim/nodeclaim.go @@ -14,9 +14,9 @@ import ( azurev1alpha2 "github.com/Azure/karpenter-provider-azure/pkg/apis/v1alpha2" awsv1beta1 "github.com/aws/karpenter-provider-aws/pkg/apis/v1beta1" - kaitov1alpha1 "github.com/azure/kaito/api/v1alpha1" - "github.com/azure/kaito/pkg/resources" - "github.com/azure/kaito/pkg/utils/consts" + kaitov1alpha1 "github.com/kaito-project/kaito/api/v1alpha1" + "github.com/kaito-project/kaito/pkg/resources" + "github.com/kaito-project/kaito/pkg/utils/consts" "github.com/samber/lo" v1 "k8s.io/api/core/v1" "k8s.io/apimachinery/pkg/api/resource" diff --git a/pkg/nodeclaim/nodeclaim_test.go b/pkg/nodeclaim/nodeclaim_test.go index 7dbcacfe7..44fb1757e 100644 --- a/pkg/nodeclaim/nodeclaim_test.go +++ b/pkg/nodeclaim/nodeclaim_test.go @@ -10,9 +10,9 @@ import ( azurev1alpha2 "github.com/Azure/karpenter-provider-azure/pkg/apis/v1alpha2" awsv1beta1 "github.com/aws/karpenter-provider-aws/pkg/apis/v1beta1" - kaitov1alpha1 "github.com/azure/kaito/api/v1alpha1" - "github.com/azure/kaito/pkg/utils/consts" - "github.com/azure/kaito/pkg/utils/test" + kaitov1alpha1 "github.com/kaito-project/kaito/api/v1alpha1" + "github.com/kaito-project/kaito/pkg/utils/consts" + "github.com/kaito-project/kaito/pkg/utils/test" "github.com/stretchr/testify/mock" "gotest.tools/assert" corev1 "k8s.io/api/core/v1" diff --git a/pkg/resources/manifests.go b/pkg/resources/manifests.go index aa439f85a..fa90b3cab 100644 --- a/pkg/resources/manifests.go +++ b/pkg/resources/manifests.go @@ -12,7 +12,7 @@ import ( "k8s.io/apimachinery/pkg/util/intstr" - kaitov1alpha1 "github.com/azure/kaito/api/v1alpha1" + kaitov1alpha1 "github.com/kaito-project/kaito/api/v1alpha1" "github.com/samber/lo" appsv1 "k8s.io/api/apps/v1" corev1 "k8s.io/api/core/v1" diff --git a/pkg/resources/manifests_test.go b/pkg/resources/manifests_test.go index b52de03e8..42b28def0 100644 --- a/pkg/resources/manifests_test.go +++ b/pkg/resources/manifests_test.go @@ -8,11 +8,11 @@ import ( "fmt" "reflect" - "github.com/azure/kaito/pkg/utils/test" + "github.com/kaito-project/kaito/pkg/utils/test" "testing" - kaitov1alpha1 "github.com/azure/kaito/api/v1alpha1" + kaitov1alpha1 "github.com/kaito-project/kaito/api/v1alpha1" v1 "k8s.io/api/core/v1" ) diff --git a/pkg/resources/nodes.go b/pkg/resources/nodes.go index adfeb0a0f..168e7b438 100644 --- a/pkg/resources/nodes.go +++ b/pkg/resources/nodes.go @@ -7,7 +7,7 @@ import ( "context" "fmt" - kaitov1alpha1 "github.com/azure/kaito/api/v1alpha1" + kaitov1alpha1 "github.com/kaito-project/kaito/api/v1alpha1" 
"github.com/samber/lo" corev1 "k8s.io/api/core/v1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" diff --git a/pkg/resources/nodes_test.go b/pkg/resources/nodes_test.go index 17bf285c9..169dfb8f1 100644 --- a/pkg/resources/nodes_test.go +++ b/pkg/resources/nodes_test.go @@ -5,7 +5,7 @@ package resources import ( "context" "errors" - "github.com/azure/kaito/pkg/utils/test" + "github.com/kaito-project/kaito/pkg/utils/test" "testing" "github.com/stretchr/testify/mock" diff --git a/pkg/resources/resources_test.go b/pkg/resources/resources_test.go index cc09c2467..7106dd52e 100644 --- a/pkg/resources/resources_test.go +++ b/pkg/resources/resources_test.go @@ -8,7 +8,7 @@ import ( "testing" "time" - "github.com/azure/kaito/pkg/utils/test" + "github.com/kaito-project/kaito/pkg/utils/test" appsv1 "k8s.io/api/apps/v1" batchv1 "k8s.io/api/batch/v1" diff --git a/pkg/sku/cloud_sku_handler.go b/pkg/sku/cloud_sku_handler.go index 762da2ac1..7e5187b73 100644 --- a/pkg/sku/cloud_sku_handler.go +++ b/pkg/sku/cloud_sku_handler.go @@ -4,7 +4,7 @@ package sku import ( - "github.com/azure/kaito/pkg/utils/consts" + "github.com/kaito-project/kaito/pkg/utils/consts" ) type CloudSKUHandler interface { diff --git a/pkg/tuning/preset-tuning.go b/pkg/tuning/preset-tuning.go index ad81115a5..d7bf52ce6 100644 --- a/pkg/tuning/preset-tuning.go +++ b/pkg/tuning/preset-tuning.go @@ -7,17 +7,17 @@ import ( "path/filepath" "strings" - "github.com/azure/kaito/pkg/utils/consts" + "github.com/kaito-project/kaito/pkg/utils/consts" "k8s.io/apimachinery/pkg/runtime" "k8s.io/utils/pointer" "knative.dev/pkg/apis" "k8s.io/apimachinery/pkg/api/resource" - kaitov1alpha1 "github.com/azure/kaito/api/v1alpha1" - "github.com/azure/kaito/pkg/model" - "github.com/azure/kaito/pkg/resources" - "github.com/azure/kaito/pkg/utils" + kaitov1alpha1 "github.com/kaito-project/kaito/api/v1alpha1" + "github.com/kaito-project/kaito/pkg/model" + "github.com/kaito-project/kaito/pkg/resources" + "github.com/kaito-project/kaito/pkg/utils" corev1 "k8s.io/api/core/v1" "k8s.io/apimachinery/pkg/api/errors" "k8s.io/klog/v2" diff --git a/pkg/tuning/preset-tuning_test.go b/pkg/tuning/preset-tuning_test.go index 99344ddcb..6e1ede930 100644 --- a/pkg/tuning/preset-tuning_test.go +++ b/pkg/tuning/preset-tuning_test.go @@ -9,12 +9,12 @@ import ( "strings" "testing" - "github.com/azure/kaito/pkg/utils" - "github.com/azure/kaito/pkg/utils/consts" + "github.com/kaito-project/kaito/pkg/utils" + "github.com/kaito-project/kaito/pkg/utils/consts" - kaitov1alpha1 "github.com/azure/kaito/api/v1alpha1" - "github.com/azure/kaito/pkg/model" - "github.com/azure/kaito/pkg/utils/test" + kaitov1alpha1 "github.com/kaito-project/kaito/api/v1alpha1" + "github.com/kaito-project/kaito/pkg/model" + "github.com/kaito-project/kaito/pkg/utils/test" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/mock" corev1 "k8s.io/api/core/v1" diff --git a/pkg/utils/common-preset.go b/pkg/utils/common-preset.go index df96276d4..3a43f6d0d 100644 --- a/pkg/utils/common-preset.go +++ b/pkg/utils/common-preset.go @@ -3,7 +3,7 @@ package utils import ( - "github.com/azure/kaito/pkg/utils/plugin" + "github.com/kaito-project/kaito/pkg/utils/plugin" corev1 "k8s.io/api/core/v1" ) diff --git a/pkg/utils/common.go b/pkg/utils/common.go index 94209bf83..713b0a2a2 100644 --- a/pkg/utils/common.go +++ b/pkg/utils/common.go @@ -10,8 +10,8 @@ import ( "os" "strings" - "github.com/azure/kaito/pkg/sku" - "github.com/azure/kaito/pkg/utils/consts" + "github.com/kaito-project/kaito/pkg/sku" + 
"github.com/kaito-project/kaito/pkg/utils/consts" "gopkg.in/yaml.v2" v1 "k8s.io/api/core/v1" "k8s.io/apimachinery/pkg/fields" diff --git a/pkg/utils/plugin/plugin.go b/pkg/utils/plugin/plugin.go index 6f186a9f9..35706cb9c 100644 --- a/pkg/utils/plugin/plugin.go +++ b/pkg/utils/plugin/plugin.go @@ -5,7 +5,7 @@ package plugin import ( "sync" - "github.com/azure/kaito/pkg/model" + "github.com/kaito-project/kaito/pkg/model" ) type Registration struct { diff --git a/pkg/utils/test/testModel.go b/pkg/utils/test/testModel.go index d12d3f720..d4ef5faf6 100644 --- a/pkg/utils/test/testModel.go +++ b/pkg/utils/test/testModel.go @@ -6,8 +6,8 @@ package test import ( "time" - "github.com/azure/kaito/pkg/model" - "github.com/azure/kaito/pkg/utils/plugin" + "github.com/kaito-project/kaito/pkg/model" + "github.com/kaito-project/kaito/pkg/utils/plugin" ) type testModel struct{} diff --git a/pkg/utils/test/testUtils.go b/pkg/utils/test/testUtils.go index 3e6410efc..89fdc96bf 100644 --- a/pkg/utils/test/testUtils.go +++ b/pkg/utils/test/testUtils.go @@ -5,7 +5,7 @@ package test import ( "github.com/aws/karpenter-core/pkg/apis/v1alpha5" - "github.com/azure/kaito/api/v1alpha1" + "github.com/kaito-project/kaito/api/v1alpha1" "github.com/samber/lo" appsv1 "k8s.io/api/apps/v1" corev1 "k8s.io/api/core/v1" diff --git a/pkg/webhooks/webhooks.go b/pkg/webhooks/webhooks.go index 4bd1eb9f6..6da64bac0 100644 --- a/pkg/webhooks/webhooks.go +++ b/pkg/webhooks/webhooks.go @@ -13,7 +13,7 @@ import ( "knative.dev/pkg/webhook/resourcesemantics" "knative.dev/pkg/webhook/resourcesemantics/validation" - kaitov1alpha1 "github.com/azure/kaito/api/v1alpha1" + kaitov1alpha1 "github.com/kaito-project/kaito/api/v1alpha1" ) func NewWorkspaceWebhooks() []knativeinjection.ControllerConstructor { diff --git a/presets/models/falcon/model.go b/presets/models/falcon/model.go index 74c39995f..e73c7c3db 100644 --- a/presets/models/falcon/model.go +++ b/presets/models/falcon/model.go @@ -3,13 +3,13 @@ package falcon import ( - "github.com/azure/kaito/pkg/tuning" + "github.com/kaito-project/kaito/pkg/tuning" "time" - kaitov1alpha1 "github.com/azure/kaito/api/v1alpha1" - "github.com/azure/kaito/pkg/inference" - "github.com/azure/kaito/pkg/model" - "github.com/azure/kaito/pkg/utils/plugin" + kaitov1alpha1 "github.com/kaito-project/kaito/api/v1alpha1" + "github.com/kaito-project/kaito/pkg/inference" + "github.com/kaito-project/kaito/pkg/model" + "github.com/kaito-project/kaito/pkg/utils/plugin" ) func init() { diff --git a/presets/models/llama2/README.md b/presets/models/llama2/README.md index 3dd2420b0..a38808f21 100644 --- a/presets/models/llama2/README.md +++ b/presets/models/llama2/README.md @@ -12,7 +12,7 @@ #### 1. Clone kaito repository ``` -git clone https://github.com/Azure/kaito.git +git clone https://github.com/kaito-project/kaito.git ``` The sample docker files and the source code of the inference API server are in the repo. 
diff --git a/presets/models/llama2/model.go b/presets/models/llama2/model.go index 6a62a8987..39a03a3d2 100644 --- a/presets/models/llama2/model.go +++ b/presets/models/llama2/model.go @@ -5,10 +5,10 @@ package llama2 import ( "time" - kaitov1alpha1 "github.com/azure/kaito/api/v1alpha1" - "github.com/azure/kaito/pkg/inference" - "github.com/azure/kaito/pkg/model" - "github.com/azure/kaito/pkg/utils/plugin" + kaitov1alpha1 "github.com/kaito-project/kaito/api/v1alpha1" + "github.com/kaito-project/kaito/pkg/inference" + "github.com/kaito-project/kaito/pkg/model" + "github.com/kaito-project/kaito/pkg/utils/plugin" ) func init() { diff --git a/presets/models/llama2chat/README.md b/presets/models/llama2chat/README.md index 37f86970d..a5f2b5e73 100644 --- a/presets/models/llama2chat/README.md +++ b/presets/models/llama2chat/README.md @@ -12,7 +12,7 @@ #### 1. Clone kaito repository ``` -git clone https://github.com/Azure/kaito.git +git clone https://github.com/kaito-project/kaito.git ``` The sample docker files and the source code of the inference API server are in the repo. diff --git a/presets/models/llama2chat/model.go b/presets/models/llama2chat/model.go index 89225bef5..8c3692e29 100644 --- a/presets/models/llama2chat/model.go +++ b/presets/models/llama2chat/model.go @@ -5,10 +5,10 @@ package llama2chat import ( "time" - kaitov1alpha1 "github.com/azure/kaito/api/v1alpha1" - "github.com/azure/kaito/pkg/inference" - "github.com/azure/kaito/pkg/model" - "github.com/azure/kaito/pkg/utils/plugin" + kaitov1alpha1 "github.com/kaito-project/kaito/api/v1alpha1" + "github.com/kaito-project/kaito/pkg/inference" + "github.com/kaito-project/kaito/pkg/model" + "github.com/kaito-project/kaito/pkg/utils/plugin" ) func init() { diff --git a/presets/models/mistral/model.go b/presets/models/mistral/model.go index b4581d6f1..1ee039b72 100644 --- a/presets/models/mistral/model.go +++ b/presets/models/mistral/model.go @@ -5,10 +5,10 @@ package mistral import ( "time" - kaitov1alpha1 "github.com/azure/kaito/api/v1alpha1" - "github.com/azure/kaito/pkg/inference" - "github.com/azure/kaito/pkg/model" - "github.com/azure/kaito/pkg/utils/plugin" + kaitov1alpha1 "github.com/kaito-project/kaito/api/v1alpha1" + "github.com/kaito-project/kaito/pkg/inference" + "github.com/kaito-project/kaito/pkg/model" + "github.com/kaito-project/kaito/pkg/utils/plugin" ) func init() { diff --git a/presets/models/phi2/model.go b/presets/models/phi2/model.go index 07fb8e0d2..0b038583b 100644 --- a/presets/models/phi2/model.go +++ b/presets/models/phi2/model.go @@ -5,10 +5,10 @@ package phi2 import ( "time" - kaitov1alpha1 "github.com/azure/kaito/api/v1alpha1" - "github.com/azure/kaito/pkg/inference" - "github.com/azure/kaito/pkg/model" - "github.com/azure/kaito/pkg/utils/plugin" + kaitov1alpha1 "github.com/kaito-project/kaito/api/v1alpha1" + "github.com/kaito-project/kaito/pkg/inference" + "github.com/kaito-project/kaito/pkg/model" + "github.com/kaito-project/kaito/pkg/utils/plugin" ) func init() { diff --git a/presets/models/phi3/model.go b/presets/models/phi3/model.go index 5656fc15a..1f15a827e 100644 --- a/presets/models/phi3/model.go +++ b/presets/models/phi3/model.go @@ -5,10 +5,10 @@ package phi3 import ( "time" - kaitov1alpha1 "github.com/azure/kaito/api/v1alpha1" - "github.com/azure/kaito/pkg/inference" - "github.com/azure/kaito/pkg/model" - "github.com/azure/kaito/pkg/utils/plugin" + kaitov1alpha1 "github.com/kaito-project/kaito/api/v1alpha1" + "github.com/kaito-project/kaito/pkg/inference" + 
"github.com/kaito-project/kaito/pkg/model" + "github.com/kaito-project/kaito/pkg/utils/plugin" ) func init() { diff --git a/presets/test/falcon-benchmark/README.md b/presets/test/falcon-benchmark/README.md index 1a624de92..5f4a9d3ba 100644 --- a/presets/test/falcon-benchmark/README.md +++ b/presets/test/falcon-benchmark/README.md @@ -23,7 +23,7 @@ Ensure your `accelerate` configuration aligns with the values provided during be - If you haven't already, you can use the Azure CLI or the Azure Portal to create and configure a GPU node pool in your AKS cluster. 2. Building and Pushing the Docker Image: - - First, you need to build a Docker image from the provided [Dockerfile](https://github.com/Azure/kaito/blob/main/docker/presets/models/tfs/Dockerfile) and push it to a container registry accessible by your AKS cluster + - First, you need to build a Docker image from the provided [Dockerfile](https://github.com/kaito-project/kaito/blob/main/docker/presets/models/tfs/Dockerfile) and push it to a container registry accessible by your AKS cluster - Example: ``` diff --git a/test/e2e/e2e_test.go b/test/e2e/e2e_test.go index 57f14fb59..eb322d676 100644 --- a/test/e2e/e2e_test.go +++ b/test/e2e/e2e_test.go @@ -10,7 +10,7 @@ import ( "os" "testing" - "github.com/azure/kaito/test/e2e/utils" + "github.com/kaito-project/kaito/test/e2e/utils" . "github.com/onsi/ginkgo/v2" . "github.com/onsi/gomega" v1 "k8s.io/api/apps/v1" diff --git a/test/e2e/inference_with_adapters_test.go b/test/e2e/inference_with_adapters_test.go index e72dc30ac..c0e7fef36 100644 --- a/test/e2e/inference_with_adapters_test.go +++ b/test/e2e/inference_with_adapters_test.go @@ -8,8 +8,8 @@ import ( "strings" "time" - kaitov1alpha1 "github.com/azure/kaito/api/v1alpha1" - "github.com/azure/kaito/test/e2e/utils" + kaitov1alpha1 "github.com/kaito-project/kaito/api/v1alpha1" + "github.com/kaito-project/kaito/test/e2e/utils" . "github.com/onsi/ginkgo/v2" . "github.com/onsi/gomega" appsv1 "k8s.io/api/apps/v1" diff --git a/test/e2e/preset_test.go b/test/e2e/preset_test.go index 761c40839..607f48340 100644 --- a/test/e2e/preset_test.go +++ b/test/e2e/preset_test.go @@ -12,8 +12,8 @@ import ( "strings" "time" - kaitov1alpha1 "github.com/azure/kaito/api/v1alpha1" - "github.com/azure/kaito/test/e2e/utils" + kaitov1alpha1 "github.com/kaito-project/kaito/api/v1alpha1" + "github.com/kaito-project/kaito/test/e2e/utils" . "github.com/onsi/ginkgo/v2" . 
"github.com/onsi/gomega" "github.com/samber/lo" diff --git a/test/e2e/utils/cluster.go b/test/e2e/utils/cluster.go index e483da756..555472e36 100644 --- a/test/e2e/utils/cluster.go +++ b/test/e2e/utils/cluster.go @@ -7,7 +7,7 @@ import ( azurev1alpha2 "github.com/Azure/karpenter-provider-azure/pkg/apis/v1alpha2" "github.com/aws/karpenter-core/pkg/apis/v1alpha5" awsv1beta1 "github.com/aws/karpenter-provider-aws/pkg/apis/v1beta1" - kaitov1alpha1 "github.com/azure/kaito/api/v1alpha1" + kaitov1alpha1 "github.com/kaito-project/kaito/api/v1alpha1" "github.com/onsi/gomega" "k8s.io/apimachinery/pkg/runtime" utilruntime "k8s.io/apimachinery/pkg/util/runtime" diff --git a/test/e2e/utils/machine.go b/test/e2e/utils/machine.go index 19b7c8a61..4d62fbe9a 100644 --- a/test/e2e/utils/machine.go +++ b/test/e2e/utils/machine.go @@ -9,7 +9,7 @@ import ( "time" "github.com/aws/karpenter-core/pkg/apis/v1alpha5" - "github.com/azure/kaito/api/v1alpha1" + "github.com/kaito-project/kaito/api/v1alpha1" "github.com/onsi/ginkgo/v2" "github.com/onsi/gomega" "github.com/samber/lo" diff --git a/test/e2e/utils/nodeclaim.go b/test/e2e/utils/nodeclaim.go index 242c6e0f2..73fb70305 100644 --- a/test/e2e/utils/nodeclaim.go +++ b/test/e2e/utils/nodeclaim.go @@ -8,7 +8,7 @@ import ( "fmt" "time" - "github.com/azure/kaito/api/v1alpha1" + "github.com/kaito-project/kaito/api/v1alpha1" "github.com/onsi/ginkgo/v2" "github.com/onsi/gomega" "github.com/samber/lo" diff --git a/test/e2e/utils/utils.go b/test/e2e/utils/utils.go index 4b7f65954..5dc011f76 100644 --- a/test/e2e/utils/utils.go +++ b/test/e2e/utils/utils.go @@ -18,7 +18,7 @@ import ( "k8s.io/client-go/rest" "k8s.io/client-go/tools/clientcmd" - kaitov1alpha1 "github.com/azure/kaito/api/v1alpha1" + kaitov1alpha1 "github.com/kaito-project/kaito/api/v1alpha1" "github.com/samber/lo" "gopkg.in/yaml.v2" corev1 "k8s.io/api/core/v1" diff --git a/test/e2e/webhook_test.go b/test/e2e/webhook_test.go index e3b05b8b5..d55c4d6ca 100644 --- a/test/e2e/webhook_test.go +++ b/test/e2e/webhook_test.go @@ -8,8 +8,8 @@ import ( "math/rand" "time" - kaitov1alpha1 "github.com/azure/kaito/api/v1alpha1" - "github.com/azure/kaito/test/e2e/utils" + kaitov1alpha1 "github.com/kaito-project/kaito/api/v1alpha1" + "github.com/kaito-project/kaito/test/e2e/utils" . "github.com/onsi/ginkgo/v2" . 
"github.com/onsi/gomega" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" From cff767bc01c0aa9df818552e3b71931acac4641a Mon Sep 17 00:00:00 2001 From: ishaansehgal99 Date: Thu, 31 Oct 2024 13:17:29 -0700 Subject: [PATCH 2/5] fix: Update gingko timeout --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index f09dc53c3..ad5243de0 100644 --- a/Makefile +++ b/Makefile @@ -120,7 +120,7 @@ GINKGO_FOCUS ?= GINKGO_SKIP ?= GINKGO_NODES ?= 1 GINKGO_NO_COLOR ?= false -GINKGO_TIMEOUT ?= 120m +GINKGO_TIMEOUT ?= 180m GINKGO_ARGS ?= -focus="$(GINKGO_FOCUS)" -skip="$(GINKGO_SKIP)" -nodes=$(GINKGO_NODES) -no-color=$(GINKGO_NO_COLOR) -timeout=$(GINKGO_TIMEOUT) $(E2E_TEST): From b61fa45465f57fa7cfe829a22ca4bfb9bc443210 Mon Sep 17 00:00:00 2001 From: jerryzhuang Date: Fri, 1 Nov 2024 21:27:31 +1100 Subject: [PATCH 3/5] update gpu provisioner install cmd Signed-off-by: jerryzhuang --- Makefile | 1 + 1 file changed, 1 insertion(+) diff --git a/Makefile b/Makefile index ad5243de0..866a4ef5f 100644 --- a/Makefile +++ b/Makefile @@ -291,6 +291,7 @@ gpu-provisioner-helm: ## Update Azure client env vars and settings in helm valu helm install $(GPU_PROVISIONER_NAMESPACE) \ --values gpu-provisioner-values.yaml \ --set settings.azure.clusterName=$(AZURE_CLUSTER_NAME) \ + --namespace $(GPU_PROVISIONER_NAMESPACE) --create-namespace \ https://github.com/Azure/gpu-provisioner/raw/gh-pages/charts/gpu-provisioner-$(GPU_PROVISIONER_VERSION).tgz kubectl wait --for=condition=available deploy "gpu-provisioner" -n gpu-provisioner --timeout=300s From 85f81c23da86378da7f5ff60bef3d0511d6a6207 Mon Sep 17 00:00:00 2001 From: Ishaan Sehgal Date: Fri, 1 Nov 2024 09:26:28 -0700 Subject: [PATCH 4/5] Update Makefile Signed-off-by: Ishaan Sehgal --- Makefile | 2 +- test/e2e/inference_with_adapters_test.go | 9 +- test/e2e/preset_test.go | 194 ++++++++--------- test/e2e/webhook_test.go | 265 ++++++++++++----------- 4 files changed, 238 insertions(+), 232 deletions(-) diff --git a/Makefile b/Makefile index 866a4ef5f..acf7ef357 100644 --- a/Makefile +++ b/Makefile @@ -3,7 +3,7 @@ REGISTRY ?= YOUR_REGISTRY IMG_NAME ?= workspace VERSION ?= v0.3.1 -GPU_PROVISIONER_VERSION ?= 0.2.0 +GPU_PROVISIONER_VERSION ?= 0.2.1 IMG_TAG ?= $(subst v,,$(VERSION)) ROOT_DIR := $(shell dirname $(realpath $(firstword $(MAKEFILE_LIST)))) diff --git a/test/e2e/inference_with_adapters_test.go b/test/e2e/inference_with_adapters_test.go index c0e7fef36..23188eb93 100644 --- a/test/e2e/inference_with_adapters_test.go +++ b/test/e2e/inference_with_adapters_test.go @@ -64,6 +64,7 @@ var expectedInitContainers2 = []corev1.Container{ func validateInitContainers(workspaceObj *kaitov1alpha1.Workspace, expectedInitContainers []corev1.Container) { By("Checking the InitContainers", func() { + GinkgoWriter.Printf("Starting to check InitContainers...\n") Eventually(func() bool { var err error var initContainers []corev1.Container @@ -79,6 +80,7 @@ func validateInitContainers(workspaceObj *kaitov1alpha1.Workspace, expectedInitC Name: workspaceObj.Name, }, dep) initContainers = dep.Spec.Template.Spec.InitContainers + GinkgoWriter.Printf("initContainers:%v\n", initContainers) if err != nil { GinkgoWriter.Printf("Error fetching resource: %v\n", err) @@ -93,9 +95,11 @@ func validateInitContainers(workspaceObj *kaitov1alpha1.Workspace, expectedInitC } initContainer, expectedInitContainer := initContainers[0], expectedInitContainers[0] - // GinkgoWriter.Printf("Resource '%s' not ready. 
Ready replicas: %d\n", workspaceObj.Name, readyReplicas) + GinkgoWriter.Printf("adapter output: initContainer.Image=%s, expectedInitContainer.Image=%s, equal=%v\n", initContainer.Image, expectedInitContainer.Image, initContainer.Image == expectedInitContainer.Image) + GinkgoWriter.Printf("adapter output: initContainer.Name=%s, expectedInitContainer.Name=%s, equal=%v\n", initContainer.Name, expectedInitContainer.Name, initContainer.Name == expectedInitContainer.Name) + return initContainer.Image == expectedInitContainer.Image && initContainer.Name == expectedInitContainer.Name - }, 20*time.Minute, utils.PollInterval).Should(BeTrue(), "Failed to wait for initContainers to be ready") + }, 5*time.Minute, utils.PollInterval).Should(BeTrue(), "Failed to wait for initContainers to be ready") }) } @@ -157,6 +161,7 @@ var _ = Describe("Workspace Preset", func() { } else { utils.ValidateMachineCreation(ctx, workspaceObj, numOfNode) } + validateResourceStatus(workspaceObj) time.Sleep(30 * time.Second) diff --git a/test/e2e/preset_test.go b/test/e2e/preset_test.go index 607f48340..b5318ced3 100644 --- a/test/e2e/preset_test.go +++ b/test/e2e/preset_test.go @@ -662,151 +662,151 @@ var _ = Describe("Workspace Preset", func() { Fail("Fail threshold reached") } }) + /* + It("should create a mistral workspace with preset public mode successfully", func() { + numOfNode := 1 + workspaceObj := createMistralWorkspaceWithPresetPublicMode(numOfNode) - It("should create a mistral workspace with preset public mode successfully", func() { - numOfNode := 1 - workspaceObj := createMistralWorkspaceWithPresetPublicMode(numOfNode) + defer cleanupResources(workspaceObj) + time.Sleep(30 * time.Second) - defer cleanupResources(workspaceObj) - time.Sleep(30 * time.Second) + validateCreateNode(workspaceObj, numOfNode) + validateResourceStatus(workspaceObj) - validateCreateNode(workspaceObj, numOfNode) - validateResourceStatus(workspaceObj) + time.Sleep(30 * time.Second) - time.Sleep(30 * time.Second) - - validateAssociatedService(workspaceObj) + validateAssociatedService(workspaceObj) - validateInferenceResource(workspaceObj, int32(numOfNode), false) + validateInferenceResource(workspaceObj, int32(numOfNode), false) - validateWorkspaceReadiness(workspaceObj) - }) + validateWorkspaceReadiness(workspaceObj) + }) - It("should create a Phi-2 workspace with preset public mode successfully", func() { - numOfNode := 1 - workspaceObj := createPhi2WorkspaceWithPresetPublicMode(numOfNode) + It("should create a Phi-2 workspace with preset public mode successfully", func() { + numOfNode := 1 + workspaceObj := createPhi2WorkspaceWithPresetPublicMode(numOfNode) - defer cleanupResources(workspaceObj) - time.Sleep(30 * time.Second) + defer cleanupResources(workspaceObj) + time.Sleep(30 * time.Second) - validateCreateNode(workspaceObj, numOfNode) - validateResourceStatus(workspaceObj) + validateCreateNode(workspaceObj, numOfNode) + validateResourceStatus(workspaceObj) - time.Sleep(30 * time.Second) + time.Sleep(30 * time.Second) - validateAssociatedService(workspaceObj) + validateAssociatedService(workspaceObj) - validateInferenceResource(workspaceObj, int32(numOfNode), false) + validateInferenceResource(workspaceObj, int32(numOfNode), false) - validateWorkspaceReadiness(workspaceObj) - }) + validateWorkspaceReadiness(workspaceObj) + }) - It("should create a falcon workspace with preset public mode successfully", func() { - numOfNode := 1 - workspaceObj := createFalconWorkspaceWithPresetPublicMode(numOfNode) + It("should create a falcon 
workspace with preset public mode successfully", func() { + numOfNode := 1 + workspaceObj := createFalconWorkspaceWithPresetPublicMode(numOfNode) - defer cleanupResources(workspaceObj) - time.Sleep(30 * time.Second) + defer cleanupResources(workspaceObj) + time.Sleep(30 * time.Second) - validateCreateNode(workspaceObj, numOfNode) - validateResourceStatus(workspaceObj) + validateCreateNode(workspaceObj, numOfNode) + validateResourceStatus(workspaceObj) - time.Sleep(30 * time.Second) + time.Sleep(30 * time.Second) - validateAssociatedService(workspaceObj) + validateAssociatedService(workspaceObj) - validateInferenceResource(workspaceObj, int32(numOfNode), false) + validateInferenceResource(workspaceObj, int32(numOfNode), false) - validateWorkspaceReadiness(workspaceObj) - }) - - It("should create a llama 7b workspace with preset private mode successfully", func() { - numOfNode := 1 - modelVersion, ok := modelInfo[PresetLlama2AChat] - if !ok { - Fail(fmt.Sprintf("Model version for %s not found", PresetLlama2AChat)) - } - workspaceObj := createLlama7BWorkspaceWithPresetPrivateMode(aiModelsRegistry, aiModelsRegistrySecret, modelVersion, numOfNode) + validateWorkspaceReadiness(workspaceObj) + }) - defer cleanupResources(workspaceObj) - time.Sleep(30 * time.Second) + It("should create a llama 7b workspace with preset private mode successfully", func() { + numOfNode := 1 + modelVersion, ok := modelInfo[PresetLlama2AChat] + if !ok { + Fail(fmt.Sprintf("Model version for %s not found", PresetLlama2AChat)) + } + workspaceObj := createLlama7BWorkspaceWithPresetPrivateMode(aiModelsRegistry, aiModelsRegistrySecret, modelVersion, numOfNode) - validateCreateNode(workspaceObj, numOfNode) - validateResourceStatus(workspaceObj) + defer cleanupResources(workspaceObj) + time.Sleep(30 * time.Second) - time.Sleep(30 * time.Second) + validateCreateNode(workspaceObj, numOfNode) + validateResourceStatus(workspaceObj) - validateAssociatedService(workspaceObj) + time.Sleep(30 * time.Second) - validateInferenceResource(workspaceObj, int32(numOfNode), false) + validateAssociatedService(workspaceObj) - validateWorkspaceReadiness(workspaceObj) - }) + validateInferenceResource(workspaceObj, int32(numOfNode), false) - It("should create a llama 13b workspace with preset private mode successfully", func() { - if !runLlama13B { - Skip("Skipping llama 13b workspace test") - } - numOfNode := 2 - modelVersion, ok := modelInfo[PresetLlama2BChat] - if !ok { - Fail(fmt.Sprintf("Model version for %s not found", PresetLlama2AChat)) - } - workspaceObj := createLlama13BWorkspaceWithPresetPrivateMode(aiModelsRegistry, aiModelsRegistrySecret, modelVersion, numOfNode) + validateWorkspaceReadiness(workspaceObj) + }) - defer cleanupResources(workspaceObj) + It("should create a llama 13b workspace with preset private mode successfully", func() { + if !runLlama13B { + Skip("Skipping llama 13b workspace test") + } + numOfNode := 2 + modelVersion, ok := modelInfo[PresetLlama2BChat] + if !ok { + Fail(fmt.Sprintf("Model version for %s not found", PresetLlama2AChat)) + } + workspaceObj := createLlama13BWorkspaceWithPresetPrivateMode(aiModelsRegistry, aiModelsRegistrySecret, modelVersion, numOfNode) - time.Sleep(30 * time.Second) + defer cleanupResources(workspaceObj) - validateCreateNode(workspaceObj, numOfNode) - validateResourceStatus(workspaceObj) + time.Sleep(30 * time.Second) - time.Sleep(30 * time.Second) + validateCreateNode(workspaceObj, numOfNode) + validateResourceStatus(workspaceObj) - validateAssociatedService(workspaceObj) + 
time.Sleep(30 * time.Second) - validateInferenceResource(workspaceObj, int32(numOfNode), true) + validateAssociatedService(workspaceObj) - validateWorkspaceReadiness(workspaceObj) - }) + validateInferenceResource(workspaceObj, int32(numOfNode), true) - It("should create a custom template workspace successfully", func() { - numOfNode := 1 - imageName := "nginx:latest" - workspaceObj := createCustomWorkspaceWithPresetCustomMode(imageName, numOfNode) + validateWorkspaceReadiness(workspaceObj) + }) - defer cleanupResources(workspaceObj) + It("should create a custom template workspace successfully", func() { + numOfNode := 1 + imageName := "nginx:latest" + workspaceObj := createCustomWorkspaceWithPresetCustomMode(imageName, numOfNode) - time.Sleep(30 * time.Second) - validateCreateNode(workspaceObj, numOfNode) - validateResourceStatus(workspaceObj) + defer cleanupResources(workspaceObj) - time.Sleep(30 * time.Second) + time.Sleep(30 * time.Second) + validateCreateNode(workspaceObj, numOfNode) + validateResourceStatus(workspaceObj) - validateInferenceResource(workspaceObj, int32(numOfNode), false) + time.Sleep(30 * time.Second) - validateWorkspaceReadiness(workspaceObj) - }) + validateInferenceResource(workspaceObj, int32(numOfNode), false) - It("should create a Phi-3-mini-128k-instruct workspace with preset public mode successfully", func() { - numOfNode := 1 - workspaceObj := createPhi3WorkspaceWithPresetPublicMode(numOfNode) + validateWorkspaceReadiness(workspaceObj) + }) - defer cleanupResources(workspaceObj) - time.Sleep(30 * time.Second) + It("should create a Phi-3-mini-128k-instruct workspace with preset public mode successfully", func() { + numOfNode := 1 + workspaceObj := createPhi3WorkspaceWithPresetPublicMode(numOfNode) - validateCreateNode(workspaceObj, numOfNode) - validateResourceStatus(workspaceObj) + defer cleanupResources(workspaceObj) + time.Sleep(30 * time.Second) - time.Sleep(30 * time.Second) + validateCreateNode(workspaceObj, numOfNode) + validateResourceStatus(workspaceObj) - validateAssociatedService(workspaceObj) + time.Sleep(30 * time.Second) - validateInferenceResource(workspaceObj, int32(numOfNode), false) + validateAssociatedService(workspaceObj) - validateWorkspaceReadiness(workspaceObj) - }) + validateInferenceResource(workspaceObj, int32(numOfNode), false) + validateWorkspaceReadiness(workspaceObj) + }) + */ It("should create a workspace for tuning successfully, and update the workspace with another dataset and output image", func() { numOfNode := 1 err := copySecretToNamespace(e2eACRSecret, namespaceName) diff --git a/test/e2e/webhook_test.go b/test/e2e/webhook_test.go index d55c4d6ca..37081d92f 100644 --- a/test/e2e/webhook_test.go +++ b/test/e2e/webhook_test.go @@ -43,154 +43,155 @@ var _ = Describe("Workspace Validation Webhook", func() { Should(HaveOccurred(), "Failed to create workspace %s", workspaceObj.Name) }) }) - - It("should validate the workspace inference spec at creation ", func() { - workspaceObj := utils.GenerateInferenceWorkspaceManifest(fmt.Sprint("webhook-", rand.Intn(1000)), namespaceName, "", 1, "Standard_NC6", - &metav1.LabelSelector{ - MatchLabels: map[string]string{"kaito-workspace": "webhook-e2e-test"}, - }, nil, "invalid-name", kaitov1alpha1.ModelImageAccessModePublic, nil, nil, nil) - - By("Creating a workspace with invalid preset name", func() { - // Create workspace - Eventually(func() error { - return utils.TestingCluster.KubeClient.Create(ctx, workspaceObj, &client.CreateOptions{}) - }, utils.PollTimeout, utils.PollInterval). 
- Should(HaveOccurred(), "Failed to create workspace %s", workspaceObj.Name) - }) - }) - - It("should validate the workspace tuning spec at creation ", func() { - workspaceObj := utils.GenerateTuningWorkspaceManifest(fmt.Sprint("webhook-", rand.Intn(1000)), namespaceName, "", 1, "Standard_NC12s_v3", - &metav1.LabelSelector{ - MatchLabels: map[string]string{"kaito-workspace": "webhook-e2e-test"}, - }, nil, nil, testDataDestinationConfig, initialPresetSpec, initialTuningMethod) - - By("Creating a workspace with nil input", func() { - // Create workspace - Eventually(func() error { - return utils.TestingCluster.KubeClient.Create(ctx, workspaceObj, &client.CreateOptions{}) - }, 20*time.Minute, utils.PollInterval). - Should(HaveOccurred(), "Failed to create workspace %s", workspaceObj.Name) + /* + + It("should validate the workspace inference spec at creation ", func() { + workspaceObj := utils.GenerateInferenceWorkspaceManifest(fmt.Sprint("webhook-", rand.Intn(1000)), namespaceName, "", 1, "Standard_NC6", + &metav1.LabelSelector{ + MatchLabels: map[string]string{"kaito-workspace": "webhook-e2e-test"}, + }, nil, "invalid-name", kaitov1alpha1.ModelImageAccessModePublic, nil, nil, nil) + + By("Creating a workspace with invalid preset name", func() { + // Create workspace + Eventually(func() error { + return utils.TestingCluster.KubeClient.Create(ctx, workspaceObj, &client.CreateOptions{}) + }, utils.PollTimeout, utils.PollInterval). + Should(HaveOccurred(), "Failed to create workspace %s", workspaceObj.Name) + }) }) - }) - - It("should validate the workspace tuning spec at creation ", func() { - workspaceObj := utils.GenerateTuningWorkspaceManifest(fmt.Sprint("webhook-", rand.Intn(1000)), namespaceName, "", 1, "Standard_NC12s_v3", - &metav1.LabelSelector{ - MatchLabels: map[string]string{"kaito-workspace": "webhook-e2e-test"}, - }, nil, testDataSourceConfig, nil, initialPresetSpec, initialTuningMethod) - By("Creating a workspace with nil output", func() { - // Create workspace - Eventually(func() error { - return utils.TestingCluster.KubeClient.Create(ctx, workspaceObj, &client.CreateOptions{}) - }, 20*time.Minute, utils.PollInterval). - Should(HaveOccurred(), "Failed to create workspace %s", workspaceObj.Name) - }) - }) - - It("should validate the workspace tuning spec at creation ", func() { - workspaceObj := utils.GenerateTuningWorkspaceManifest(fmt.Sprint("webhook-", rand.Intn(1000)), namespaceName, "", 1, "Standard_NC12s_v3", - &metav1.LabelSelector{ - MatchLabels: map[string]string{"kaito-workspace": "webhook-e2e-test"}, - }, nil, testDataSourceConfig, testDataDestinationConfig, nil, initialTuningMethod) - - By("Creating a workspace with nil preset", func() { - // Create workspace - Eventually(func() error { - return utils.TestingCluster.KubeClient.Create(ctx, workspaceObj, &client.CreateOptions{}) - }, 20*time.Minute, utils.PollInterval). 
- Should(HaveOccurred(), "Failed to create workspace %s", workspaceObj.Name) - }) - }) - - //TODO preset private mode - //TODO custom template - - It("should validate the workspace resource spec at update ", func() { - workspaceObj := utils.GenerateInferenceWorkspaceManifest(fmt.Sprint("webhook-", rand.Intn(1000)), namespaceName, "", 1, "Standard_NC12s_v3", - &metav1.LabelSelector{ - MatchLabels: map[string]string{"kaito-workspace": "webhook-e2e-test"}, - }, nil, PresetFalcon7BModel, kaitov1alpha1.ModelImageAccessModePublic, nil, nil, nil) - - By("Creating a valid workspace", func() { - // Create workspace - Eventually(func() error { - return utils.TestingCluster.KubeClient.Create(ctx, workspaceObj, &client.CreateOptions{}) - }, 20*time.Minute, utils.PollInterval). - Should(Succeed(), "Failed to create workspace %s", workspaceObj.Name) + It("should validate the workspace tuning spec at creation ", func() { + workspaceObj := utils.GenerateTuningWorkspaceManifest(fmt.Sprint("webhook-", rand.Intn(1000)), namespaceName, "", 1, "Standard_NC12s_v3", + &metav1.LabelSelector{ + MatchLabels: map[string]string{"kaito-workspace": "webhook-e2e-test"}, + }, nil, nil, testDataDestinationConfig, initialPresetSpec, initialTuningMethod) + + By("Creating a workspace with nil input", func() { + // Create workspace + Eventually(func() error { + return utils.TestingCluster.KubeClient.Create(ctx, workspaceObj, &client.CreateOptions{}) + }, 20*time.Minute, utils.PollInterval). + Should(HaveOccurred(), "Failed to create workspace %s", workspaceObj.Name) + }) }) - By("Updating the label selector", func() { - updatedObj := workspaceObj - updatedObj.Resource.LabelSelector = &metav1.LabelSelector{} - // update workspace - Eventually(func() error { - return utils.TestingCluster.KubeClient.Update(ctx, updatedObj, &client.UpdateOptions{}) - }, utils.PollTimeout, utils.PollInterval). - Should(HaveOccurred(), "Failed to update workspace %s", updatedObj.Name) + It("should validate the workspace tuning spec at creation ", func() { + workspaceObj := utils.GenerateTuningWorkspaceManifest(fmt.Sprint("webhook-", rand.Intn(1000)), namespaceName, "", 1, "Standard_NC12s_v3", + &metav1.LabelSelector{ + MatchLabels: map[string]string{"kaito-workspace": "webhook-e2e-test"}, + }, nil, testDataSourceConfig, nil, initialPresetSpec, initialTuningMethod) + + By("Creating a workspace with nil output", func() { + // Create workspace + Eventually(func() error { + return utils.TestingCluster.KubeClient.Create(ctx, workspaceObj, &client.CreateOptions{}) + }, 20*time.Minute, utils.PollInterval). + Should(HaveOccurred(), "Failed to create workspace %s", workspaceObj.Name) + }) }) - By("Updating the InstanceType", func() { - updatedObj := workspaceObj - updatedObj.Resource.InstanceType = "Standard_NC12" - // update workspace - Eventually(func() error { - return utils.TestingCluster.KubeClient.Update(ctx, updatedObj, &client.UpdateOptions{}) - }, utils.PollTimeout, utils.PollInterval). 
- Should(HaveOccurred(), "Failed to update workspace %s", updatedObj.Name) + It("should validate the workspace tuning spec at creation ", func() { + workspaceObj := utils.GenerateTuningWorkspaceManifest(fmt.Sprint("webhook-", rand.Intn(1000)), namespaceName, "", 1, "Standard_NC12s_v3", + &metav1.LabelSelector{ + MatchLabels: map[string]string{"kaito-workspace": "webhook-e2e-test"}, + }, nil, testDataSourceConfig, testDataDestinationConfig, nil, initialTuningMethod) + + By("Creating a workspace with nil preset", func() { + // Create workspace + Eventually(func() error { + return utils.TestingCluster.KubeClient.Create(ctx, workspaceObj, &client.CreateOptions{}) + }, 20*time.Minute, utils.PollInterval). + Should(HaveOccurred(), "Failed to create workspace %s", workspaceObj.Name) + }) }) + //TODO preset private mode //TODO custom template - // delete workspace - Eventually(func() error { - return utils.TestingCluster.KubeClient.Delete(ctx, workspaceObj, &client.DeleteOptions{}) - }, utils.PollTimeout, utils.PollInterval).Should(Succeed(), "Failed to delete workspace") - - }) - - It("should validate the workspace tuning spec at update ", func() { - workspaceObj := utils.GenerateTuningWorkspaceManifest(fmt.Sprint("webhook-", rand.Intn(1000)), namespaceName, "", 1, "Standard_NC12s_v3", - &metav1.LabelSelector{ - MatchLabels: map[string]string{"kaito-workspace": "webhook-e2e-test"}, - }, nil, testDataSourceConfig, testDataDestinationConfig, initialPresetSpec, initialTuningMethod) - - By("Creating a valid tuning workspace", func() { - // Create workspace + It("should validate the workspace resource spec at update ", func() { + workspaceObj := utils.GenerateInferenceWorkspaceManifest(fmt.Sprint("webhook-", rand.Intn(1000)), namespaceName, "", 1, "Standard_NC12s_v3", + &metav1.LabelSelector{ + MatchLabels: map[string]string{"kaito-workspace": "webhook-e2e-test"}, + }, nil, PresetFalcon7BModel, kaitov1alpha1.ModelImageAccessModePublic, nil, nil, nil) + + By("Creating a valid workspace", func() { + // Create workspace + Eventually(func() error { + return utils.TestingCluster.KubeClient.Create(ctx, workspaceObj, &client.CreateOptions{}) + }, 20*time.Minute, utils.PollInterval). + Should(Succeed(), "Failed to create workspace %s", workspaceObj.Name) + }) + + By("Updating the label selector", func() { + updatedObj := workspaceObj + updatedObj.Resource.LabelSelector = &metav1.LabelSelector{} + // update workspace + Eventually(func() error { + return utils.TestingCluster.KubeClient.Update(ctx, updatedObj, &client.UpdateOptions{}) + }, utils.PollTimeout, utils.PollInterval). + Should(HaveOccurred(), "Failed to update workspace %s", updatedObj.Name) + }) + + By("Updating the InstanceType", func() { + updatedObj := workspaceObj + updatedObj.Resource.InstanceType = "Standard_NC12" + // update workspace + Eventually(func() error { + return utils.TestingCluster.KubeClient.Update(ctx, updatedObj, &client.UpdateOptions{}) + }, utils.PollTimeout, utils.PollInterval). + Should(HaveOccurred(), "Failed to update workspace %s", updatedObj.Name) + }) + + //TODO custom template + + // delete workspace Eventually(func() error { - return utils.TestingCluster.KubeClient.Create(ctx, workspaceObj, &client.CreateOptions{}) - }, 20*time.Minute, utils.PollInterval). 
- Should(Succeed(), "Failed to create workspace %s", workspaceObj.Name) - }) + return utils.TestingCluster.KubeClient.Delete(ctx, workspaceObj, &client.DeleteOptions{}) + }, utils.PollTimeout, utils.PollInterval).Should(Succeed(), "Failed to delete workspace") - By("Updating the tuning preset", func() { - updatedObj := workspaceObj - updatedObj.Tuning.Preset = updatedPresetSpec - // update workspace - Eventually(func() error { - return utils.TestingCluster.KubeClient.Update(ctx, updatedObj, &client.UpdateOptions{}) - }, utils.PollTimeout, utils.PollInterval). - Should(HaveOccurred(), "Failed to update workspace %s", updatedObj.Name) }) - By("Updating the Method", func() { - updatedObj := workspaceObj - updatedObj.Tuning.Method = alternativeTuningMethod - // update workspace + It("should validate the workspace tuning spec at update ", func() { + workspaceObj := utils.GenerateTuningWorkspaceManifest(fmt.Sprint("webhook-", rand.Intn(1000)), namespaceName, "", 1, "Standard_NC12s_v3", + &metav1.LabelSelector{ + MatchLabels: map[string]string{"kaito-workspace": "webhook-e2e-test"}, + }, nil, testDataSourceConfig, testDataDestinationConfig, initialPresetSpec, initialTuningMethod) + + By("Creating a valid tuning workspace", func() { + // Create workspace + Eventually(func() error { + return utils.TestingCluster.KubeClient.Create(ctx, workspaceObj, &client.CreateOptions{}) + }, 20*time.Minute, utils.PollInterval). + Should(Succeed(), "Failed to create workspace %s", workspaceObj.Name) + }) + + By("Updating the tuning preset", func() { + updatedObj := workspaceObj + updatedObj.Tuning.Preset = updatedPresetSpec + // update workspace + Eventually(func() error { + return utils.TestingCluster.KubeClient.Update(ctx, updatedObj, &client.UpdateOptions{}) + }, utils.PollTimeout, utils.PollInterval). + Should(HaveOccurred(), "Failed to update workspace %s", updatedObj.Name) + }) + + By("Updating the Method", func() { + updatedObj := workspaceObj + updatedObj.Tuning.Method = alternativeTuningMethod + // update workspace + Eventually(func() error { + return utils.TestingCluster.KubeClient.Update(ctx, updatedObj, &client.UpdateOptions{}) + }, utils.PollTimeout, utils.PollInterval). + Should(HaveOccurred(), "Failed to update workspace %s", updatedObj.Name) + }) + + // delete workspace Eventually(func() error { - return utils.TestingCluster.KubeClient.Update(ctx, updatedObj, &client.UpdateOptions{}) - }, utils.PollTimeout, utils.PollInterval). - Should(HaveOccurred(), "Failed to update workspace %s", updatedObj.Name) - }) - - // delete workspace - Eventually(func() error { - return utils.TestingCluster.KubeClient.Delete(ctx, workspaceObj, &client.DeleteOptions{}) - }, utils.PollTimeout, utils.PollInterval).Should(Succeed(), "Failed to delete workspace") - - }) + return utils.TestingCluster.KubeClient.Delete(ctx, workspaceObj, &client.DeleteOptions{}) + }, utils.PollTimeout, utils.PollInterval).Should(Succeed(), "Failed to delete workspace") + }) + */ It("should validate the workspace inference spec at update ", func() { workspaceObj := utils.GenerateInferenceWorkspaceManifest(fmt.Sprint("webhook-", rand.Intn(1000)), namespaceName, "", 1, "Standard_NC12s_v3", &metav1.LabelSelector{ From d5ea264985b03475ff911c36ebc2c458edaed21b Mon Sep 17 00:00:00 2001 From: zhuangqh Date: Mon, 4 Nov 2024 10:07:18 +1100 Subject: [PATCH 5/5] Revert "Update Makefile" This reverts commit 85f81c23da86378da7f5ff60bef3d0511d6a6207. 
--- test/e2e/inference_with_adapters_test.go | 9 +- test/e2e/preset_test.go | 194 ++++++++--------- test/e2e/webhook_test.go | 265 +++++++++++------------ 3 files changed, 231 insertions(+), 237 deletions(-) diff --git a/test/e2e/inference_with_adapters_test.go b/test/e2e/inference_with_adapters_test.go index e4ba6f22a..c2fd1ffe2 100644 --- a/test/e2e/inference_with_adapters_test.go +++ b/test/e2e/inference_with_adapters_test.go @@ -64,7 +64,6 @@ var expectedInitContainers2 = []corev1.Container{ func validateInitContainers(workspaceObj *kaitov1alpha1.Workspace, expectedInitContainers []corev1.Container) { By("Checking the InitContainers", func() { - GinkgoWriter.Printf("Starting to check InitContainers...\n") Eventually(func() bool { var err error var initContainers []corev1.Container @@ -80,7 +79,6 @@ func validateInitContainers(workspaceObj *kaitov1alpha1.Workspace, expectedInitC Name: workspaceObj.Name, }, dep) initContainers = dep.Spec.Template.Spec.InitContainers - GinkgoWriter.Printf("initContainers:%v\n", initContainers) if err != nil { GinkgoWriter.Printf("Error fetching resource: %v\n", err) @@ -97,11 +95,9 @@ func validateInitContainers(workspaceObj *kaitov1alpha1.Workspace, expectedInitC } } - GinkgoWriter.Printf("adapter output: initContainer.Image=%s, expectedInitContainer.Image=%s, equal=%v\n", initContainer.Image, expectedInitContainer.Image, initContainer.Image == expectedInitContainer.Image) - GinkgoWriter.Printf("adapter output: initContainer.Name=%s, expectedInitContainer.Name=%s, equal=%v\n", initContainer.Name, expectedInitContainer.Name, initContainer.Name == expectedInitContainer.Name) - + // GinkgoWriter.Printf("Resource '%s' not ready. Ready replicas: %d\n", workspaceObj.Name, readyReplicas) return initContainer.Image == expectedInitContainer.Image && initContainer.Name == expectedInitContainer.Name - }, 5*time.Minute, utils.PollInterval).Should(BeTrue(), "Failed to wait for initContainers to be ready") + }, 20*time.Minute, utils.PollInterval).Should(BeTrue(), "Failed to wait for initContainers to be ready") }) } @@ -163,7 +159,6 @@ var _ = Describe("Workspace Preset", func() { } else { utils.ValidateMachineCreation(ctx, workspaceObj, numOfNode) } - validateResourceStatus(workspaceObj) time.Sleep(30 * time.Second) diff --git a/test/e2e/preset_test.go b/test/e2e/preset_test.go index b5318ced3..607f48340 100644 --- a/test/e2e/preset_test.go +++ b/test/e2e/preset_test.go @@ -662,151 +662,151 @@ var _ = Describe("Workspace Preset", func() { Fail("Fail threshold reached") } }) - /* - It("should create a mistral workspace with preset public mode successfully", func() { - numOfNode := 1 - workspaceObj := createMistralWorkspaceWithPresetPublicMode(numOfNode) - defer cleanupResources(workspaceObj) - time.Sleep(30 * time.Second) + It("should create a mistral workspace with preset public mode successfully", func() { + numOfNode := 1 + workspaceObj := createMistralWorkspaceWithPresetPublicMode(numOfNode) - validateCreateNode(workspaceObj, numOfNode) - validateResourceStatus(workspaceObj) + defer cleanupResources(workspaceObj) + time.Sleep(30 * time.Second) - time.Sleep(30 * time.Second) + validateCreateNode(workspaceObj, numOfNode) + validateResourceStatus(workspaceObj) - validateAssociatedService(workspaceObj) + time.Sleep(30 * time.Second) - validateInferenceResource(workspaceObj, int32(numOfNode), false) + validateAssociatedService(workspaceObj) - validateWorkspaceReadiness(workspaceObj) - }) + validateInferenceResource(workspaceObj, int32(numOfNode), false) - It("should 
create a Phi-2 workspace with preset public mode successfully", func() { - numOfNode := 1 - workspaceObj := createPhi2WorkspaceWithPresetPublicMode(numOfNode) + validateWorkspaceReadiness(workspaceObj) + }) - defer cleanupResources(workspaceObj) - time.Sleep(30 * time.Second) + It("should create a Phi-2 workspace with preset public mode successfully", func() { + numOfNode := 1 + workspaceObj := createPhi2WorkspaceWithPresetPublicMode(numOfNode) - validateCreateNode(workspaceObj, numOfNode) - validateResourceStatus(workspaceObj) + defer cleanupResources(workspaceObj) + time.Sleep(30 * time.Second) - time.Sleep(30 * time.Second) + validateCreateNode(workspaceObj, numOfNode) + validateResourceStatus(workspaceObj) - validateAssociatedService(workspaceObj) + time.Sleep(30 * time.Second) - validateInferenceResource(workspaceObj, int32(numOfNode), false) + validateAssociatedService(workspaceObj) - validateWorkspaceReadiness(workspaceObj) - }) + validateInferenceResource(workspaceObj, int32(numOfNode), false) - It("should create a falcon workspace with preset public mode successfully", func() { - numOfNode := 1 - workspaceObj := createFalconWorkspaceWithPresetPublicMode(numOfNode) + validateWorkspaceReadiness(workspaceObj) + }) - defer cleanupResources(workspaceObj) - time.Sleep(30 * time.Second) + It("should create a falcon workspace with preset public mode successfully", func() { + numOfNode := 1 + workspaceObj := createFalconWorkspaceWithPresetPublicMode(numOfNode) - validateCreateNode(workspaceObj, numOfNode) - validateResourceStatus(workspaceObj) + defer cleanupResources(workspaceObj) + time.Sleep(30 * time.Second) - time.Sleep(30 * time.Second) + validateCreateNode(workspaceObj, numOfNode) + validateResourceStatus(workspaceObj) - validateAssociatedService(workspaceObj) + time.Sleep(30 * time.Second) - validateInferenceResource(workspaceObj, int32(numOfNode), false) + validateAssociatedService(workspaceObj) - validateWorkspaceReadiness(workspaceObj) - }) + validateInferenceResource(workspaceObj, int32(numOfNode), false) - It("should create a llama 7b workspace with preset private mode successfully", func() { - numOfNode := 1 - modelVersion, ok := modelInfo[PresetLlama2AChat] - if !ok { - Fail(fmt.Sprintf("Model version for %s not found", PresetLlama2AChat)) - } - workspaceObj := createLlama7BWorkspaceWithPresetPrivateMode(aiModelsRegistry, aiModelsRegistrySecret, modelVersion, numOfNode) + validateWorkspaceReadiness(workspaceObj) + }) + + It("should create a llama 7b workspace with preset private mode successfully", func() { + numOfNode := 1 + modelVersion, ok := modelInfo[PresetLlama2AChat] + if !ok { + Fail(fmt.Sprintf("Model version for %s not found", PresetLlama2AChat)) + } + workspaceObj := createLlama7BWorkspaceWithPresetPrivateMode(aiModelsRegistry, aiModelsRegistrySecret, modelVersion, numOfNode) - defer cleanupResources(workspaceObj) - time.Sleep(30 * time.Second) + defer cleanupResources(workspaceObj) + time.Sleep(30 * time.Second) + + validateCreateNode(workspaceObj, numOfNode) + validateResourceStatus(workspaceObj) - validateCreateNode(workspaceObj, numOfNode) - validateResourceStatus(workspaceObj) + time.Sleep(30 * time.Second) - time.Sleep(30 * time.Second) + validateAssociatedService(workspaceObj) - validateAssociatedService(workspaceObj) + validateInferenceResource(workspaceObj, int32(numOfNode), false) - validateInferenceResource(workspaceObj, int32(numOfNode), false) + validateWorkspaceReadiness(workspaceObj) + }) - validateWorkspaceReadiness(workspaceObj) - }) + It("should 
create a llama 13b workspace with preset private mode successfully", func() { + if !runLlama13B { + Skip("Skipping llama 13b workspace test") + } + numOfNode := 2 + modelVersion, ok := modelInfo[PresetLlama2BChat] + if !ok { + Fail(fmt.Sprintf("Model version for %s not found", PresetLlama2AChat)) + } + workspaceObj := createLlama13BWorkspaceWithPresetPrivateMode(aiModelsRegistry, aiModelsRegistrySecret, modelVersion, numOfNode) - It("should create a llama 13b workspace with preset private mode successfully", func() { - if !runLlama13B { - Skip("Skipping llama 13b workspace test") - } - numOfNode := 2 - modelVersion, ok := modelInfo[PresetLlama2BChat] - if !ok { - Fail(fmt.Sprintf("Model version for %s not found", PresetLlama2AChat)) - } - workspaceObj := createLlama13BWorkspaceWithPresetPrivateMode(aiModelsRegistry, aiModelsRegistrySecret, modelVersion, numOfNode) + defer cleanupResources(workspaceObj) - defer cleanupResources(workspaceObj) + time.Sleep(30 * time.Second) - time.Sleep(30 * time.Second) + validateCreateNode(workspaceObj, numOfNode) + validateResourceStatus(workspaceObj) - validateCreateNode(workspaceObj, numOfNode) - validateResourceStatus(workspaceObj) + time.Sleep(30 * time.Second) - time.Sleep(30 * time.Second) + validateAssociatedService(workspaceObj) - validateAssociatedService(workspaceObj) + validateInferenceResource(workspaceObj, int32(numOfNode), true) - validateInferenceResource(workspaceObj, int32(numOfNode), true) + validateWorkspaceReadiness(workspaceObj) + }) - validateWorkspaceReadiness(workspaceObj) - }) + It("should create a custom template workspace successfully", func() { + numOfNode := 1 + imageName := "nginx:latest" + workspaceObj := createCustomWorkspaceWithPresetCustomMode(imageName, numOfNode) - It("should create a custom template workspace successfully", func() { - numOfNode := 1 - imageName := "nginx:latest" - workspaceObj := createCustomWorkspaceWithPresetCustomMode(imageName, numOfNode) + defer cleanupResources(workspaceObj) - defer cleanupResources(workspaceObj) + time.Sleep(30 * time.Second) + validateCreateNode(workspaceObj, numOfNode) + validateResourceStatus(workspaceObj) - time.Sleep(30 * time.Second) - validateCreateNode(workspaceObj, numOfNode) - validateResourceStatus(workspaceObj) + time.Sleep(30 * time.Second) - time.Sleep(30 * time.Second) + validateInferenceResource(workspaceObj, int32(numOfNode), false) - validateInferenceResource(workspaceObj, int32(numOfNode), false) + validateWorkspaceReadiness(workspaceObj) + }) - validateWorkspaceReadiness(workspaceObj) - }) + It("should create a Phi-3-mini-128k-instruct workspace with preset public mode successfully", func() { + numOfNode := 1 + workspaceObj := createPhi3WorkspaceWithPresetPublicMode(numOfNode) - It("should create a Phi-3-mini-128k-instruct workspace with preset public mode successfully", func() { - numOfNode := 1 - workspaceObj := createPhi3WorkspaceWithPresetPublicMode(numOfNode) + defer cleanupResources(workspaceObj) + time.Sleep(30 * time.Second) - defer cleanupResources(workspaceObj) - time.Sleep(30 * time.Second) + validateCreateNode(workspaceObj, numOfNode) + validateResourceStatus(workspaceObj) - validateCreateNode(workspaceObj, numOfNode) - validateResourceStatus(workspaceObj) + time.Sleep(30 * time.Second) - time.Sleep(30 * time.Second) + validateAssociatedService(workspaceObj) - validateAssociatedService(workspaceObj) + validateInferenceResource(workspaceObj, int32(numOfNode), false) - validateInferenceResource(workspaceObj, int32(numOfNode), false) + 
validateWorkspaceReadiness(workspaceObj) + }) - validateWorkspaceReadiness(workspaceObj) - }) - */ It("should create a workspace for tuning successfully, and update the workspace with another dataset and output image", func() { numOfNode := 1 err := copySecretToNamespace(e2eACRSecret, namespaceName) diff --git a/test/e2e/webhook_test.go b/test/e2e/webhook_test.go index 37081d92f..d55c4d6ca 100644 --- a/test/e2e/webhook_test.go +++ b/test/e2e/webhook_test.go @@ -43,155 +43,154 @@ var _ = Describe("Workspace Validation Webhook", func() { Should(HaveOccurred(), "Failed to create workspace %s", workspaceObj.Name) }) }) - /* - - It("should validate the workspace inference spec at creation ", func() { - workspaceObj := utils.GenerateInferenceWorkspaceManifest(fmt.Sprint("webhook-", rand.Intn(1000)), namespaceName, "", 1, "Standard_NC6", - &metav1.LabelSelector{ - MatchLabels: map[string]string{"kaito-workspace": "webhook-e2e-test"}, - }, nil, "invalid-name", kaitov1alpha1.ModelImageAccessModePublic, nil, nil, nil) - - By("Creating a workspace with invalid preset name", func() { - // Create workspace - Eventually(func() error { - return utils.TestingCluster.KubeClient.Create(ctx, workspaceObj, &client.CreateOptions{}) - }, utils.PollTimeout, utils.PollInterval). - Should(HaveOccurred(), "Failed to create workspace %s", workspaceObj.Name) - }) + + It("should validate the workspace inference spec at creation ", func() { + workspaceObj := utils.GenerateInferenceWorkspaceManifest(fmt.Sprint("webhook-", rand.Intn(1000)), namespaceName, "", 1, "Standard_NC6", + &metav1.LabelSelector{ + MatchLabels: map[string]string{"kaito-workspace": "webhook-e2e-test"}, + }, nil, "invalid-name", kaitov1alpha1.ModelImageAccessModePublic, nil, nil, nil) + + By("Creating a workspace with invalid preset name", func() { + // Create workspace + Eventually(func() error { + return utils.TestingCluster.KubeClient.Create(ctx, workspaceObj, &client.CreateOptions{}) + }, utils.PollTimeout, utils.PollInterval). + Should(HaveOccurred(), "Failed to create workspace %s", workspaceObj.Name) }) + }) + + It("should validate the workspace tuning spec at creation ", func() { + workspaceObj := utils.GenerateTuningWorkspaceManifest(fmt.Sprint("webhook-", rand.Intn(1000)), namespaceName, "", 1, "Standard_NC12s_v3", + &metav1.LabelSelector{ + MatchLabels: map[string]string{"kaito-workspace": "webhook-e2e-test"}, + }, nil, nil, testDataDestinationConfig, initialPresetSpec, initialTuningMethod) - It("should validate the workspace tuning spec at creation ", func() { - workspaceObj := utils.GenerateTuningWorkspaceManifest(fmt.Sprint("webhook-", rand.Intn(1000)), namespaceName, "", 1, "Standard_NC12s_v3", - &metav1.LabelSelector{ - MatchLabels: map[string]string{"kaito-workspace": "webhook-e2e-test"}, - }, nil, nil, testDataDestinationConfig, initialPresetSpec, initialTuningMethod) - - By("Creating a workspace with nil input", func() { - // Create workspace - Eventually(func() error { - return utils.TestingCluster.KubeClient.Create(ctx, workspaceObj, &client.CreateOptions{}) - }, 20*time.Minute, utils.PollInterval). - Should(HaveOccurred(), "Failed to create workspace %s", workspaceObj.Name) - }) + By("Creating a workspace with nil input", func() { + // Create workspace + Eventually(func() error { + return utils.TestingCluster.KubeClient.Create(ctx, workspaceObj, &client.CreateOptions{}) + }, 20*time.Minute, utils.PollInterval). 
+ Should(HaveOccurred(), "Failed to create workspace %s", workspaceObj.Name) }) + }) - It("should validate the workspace tuning spec at creation ", func() { - workspaceObj := utils.GenerateTuningWorkspaceManifest(fmt.Sprint("webhook-", rand.Intn(1000)), namespaceName, "", 1, "Standard_NC12s_v3", - &metav1.LabelSelector{ - MatchLabels: map[string]string{"kaito-workspace": "webhook-e2e-test"}, - }, nil, testDataSourceConfig, nil, initialPresetSpec, initialTuningMethod) - - By("Creating a workspace with nil output", func() { - // Create workspace - Eventually(func() error { - return utils.TestingCluster.KubeClient.Create(ctx, workspaceObj, &client.CreateOptions{}) - }, 20*time.Minute, utils.PollInterval). - Should(HaveOccurred(), "Failed to create workspace %s", workspaceObj.Name) - }) + It("should validate the workspace tuning spec at creation ", func() { + workspaceObj := utils.GenerateTuningWorkspaceManifest(fmt.Sprint("webhook-", rand.Intn(1000)), namespaceName, "", 1, "Standard_NC12s_v3", + &metav1.LabelSelector{ + MatchLabels: map[string]string{"kaito-workspace": "webhook-e2e-test"}, + }, nil, testDataSourceConfig, nil, initialPresetSpec, initialTuningMethod) + + By("Creating a workspace with nil output", func() { + // Create workspace + Eventually(func() error { + return utils.TestingCluster.KubeClient.Create(ctx, workspaceObj, &client.CreateOptions{}) + }, 20*time.Minute, utils.PollInterval). + Should(HaveOccurred(), "Failed to create workspace %s", workspaceObj.Name) }) + }) - It("should validate the workspace tuning spec at creation ", func() { - workspaceObj := utils.GenerateTuningWorkspaceManifest(fmt.Sprint("webhook-", rand.Intn(1000)), namespaceName, "", 1, "Standard_NC12s_v3", - &metav1.LabelSelector{ - MatchLabels: map[string]string{"kaito-workspace": "webhook-e2e-test"}, - }, nil, testDataSourceConfig, testDataDestinationConfig, nil, initialTuningMethod) - - By("Creating a workspace with nil preset", func() { - // Create workspace - Eventually(func() error { - return utils.TestingCluster.KubeClient.Create(ctx, workspaceObj, &client.CreateOptions{}) - }, 20*time.Minute, utils.PollInterval). - Should(HaveOccurred(), "Failed to create workspace %s", workspaceObj.Name) - }) + It("should validate the workspace tuning spec at creation ", func() { + workspaceObj := utils.GenerateTuningWorkspaceManifest(fmt.Sprint("webhook-", rand.Intn(1000)), namespaceName, "", 1, "Standard_NC12s_v3", + &metav1.LabelSelector{ + MatchLabels: map[string]string{"kaito-workspace": "webhook-e2e-test"}, + }, nil, testDataSourceConfig, testDataDestinationConfig, nil, initialTuningMethod) + + By("Creating a workspace with nil preset", func() { + // Create workspace + Eventually(func() error { + return utils.TestingCluster.KubeClient.Create(ctx, workspaceObj, &client.CreateOptions{}) + }, 20*time.Minute, utils.PollInterval). 
+ Should(HaveOccurred(), "Failed to create workspace %s", workspaceObj.Name) }) + }) - //TODO preset private mode - //TODO custom template + //TODO preset private mode + //TODO custom template + + It("should validate the workspace resource spec at update ", func() { + workspaceObj := utils.GenerateInferenceWorkspaceManifest(fmt.Sprint("webhook-", rand.Intn(1000)), namespaceName, "", 1, "Standard_NC12s_v3", + &metav1.LabelSelector{ + MatchLabels: map[string]string{"kaito-workspace": "webhook-e2e-test"}, + }, nil, PresetFalcon7BModel, kaitov1alpha1.ModelImageAccessModePublic, nil, nil, nil) + + By("Creating a valid workspace", func() { + // Create workspace + Eventually(func() error { + return utils.TestingCluster.KubeClient.Create(ctx, workspaceObj, &client.CreateOptions{}) + }, 20*time.Minute, utils.PollInterval). + Should(Succeed(), "Failed to create workspace %s", workspaceObj.Name) + }) - It("should validate the workspace resource spec at update ", func() { - workspaceObj := utils.GenerateInferenceWorkspaceManifest(fmt.Sprint("webhook-", rand.Intn(1000)), namespaceName, "", 1, "Standard_NC12s_v3", - &metav1.LabelSelector{ - MatchLabels: map[string]string{"kaito-workspace": "webhook-e2e-test"}, - }, nil, PresetFalcon7BModel, kaitov1alpha1.ModelImageAccessModePublic, nil, nil, nil) - - By("Creating a valid workspace", func() { - // Create workspace - Eventually(func() error { - return utils.TestingCluster.KubeClient.Create(ctx, workspaceObj, &client.CreateOptions{}) - }, 20*time.Minute, utils.PollInterval). - Should(Succeed(), "Failed to create workspace %s", workspaceObj.Name) - }) - - By("Updating the label selector", func() { - updatedObj := workspaceObj - updatedObj.Resource.LabelSelector = &metav1.LabelSelector{} - // update workspace - Eventually(func() error { - return utils.TestingCluster.KubeClient.Update(ctx, updatedObj, &client.UpdateOptions{}) - }, utils.PollTimeout, utils.PollInterval). - Should(HaveOccurred(), "Failed to update workspace %s", updatedObj.Name) - }) - - By("Updating the InstanceType", func() { - updatedObj := workspaceObj - updatedObj.Resource.InstanceType = "Standard_NC12" - // update workspace - Eventually(func() error { - return utils.TestingCluster.KubeClient.Update(ctx, updatedObj, &client.UpdateOptions{}) - }, utils.PollTimeout, utils.PollInterval). - Should(HaveOccurred(), "Failed to update workspace %s", updatedObj.Name) - }) - - //TODO custom template - - // delete workspace + By("Updating the label selector", func() { + updatedObj := workspaceObj + updatedObj.Resource.LabelSelector = &metav1.LabelSelector{} + // update workspace Eventually(func() error { - return utils.TestingCluster.KubeClient.Delete(ctx, workspaceObj, &client.DeleteOptions{}) - }, utils.PollTimeout, utils.PollInterval).Should(Succeed(), "Failed to delete workspace") + return utils.TestingCluster.KubeClient.Update(ctx, updatedObj, &client.UpdateOptions{}) + }, utils.PollTimeout, utils.PollInterval). + Should(HaveOccurred(), "Failed to update workspace %s", updatedObj.Name) + }) + By("Updating the InstanceType", func() { + updatedObj := workspaceObj + updatedObj.Resource.InstanceType = "Standard_NC12" + // update workspace + Eventually(func() error { + return utils.TestingCluster.KubeClient.Update(ctx, updatedObj, &client.UpdateOptions{}) + }, utils.PollTimeout, utils.PollInterval). 
+ Should(HaveOccurred(), "Failed to update workspace %s", updatedObj.Name) }) - It("should validate the workspace tuning spec at update ", func() { - workspaceObj := utils.GenerateTuningWorkspaceManifest(fmt.Sprint("webhook-", rand.Intn(1000)), namespaceName, "", 1, "Standard_NC12s_v3", - &metav1.LabelSelector{ - MatchLabels: map[string]string{"kaito-workspace": "webhook-e2e-test"}, - }, nil, testDataSourceConfig, testDataDestinationConfig, initialPresetSpec, initialTuningMethod) - - By("Creating a valid tuning workspace", func() { - // Create workspace - Eventually(func() error { - return utils.TestingCluster.KubeClient.Create(ctx, workspaceObj, &client.CreateOptions{}) - }, 20*time.Minute, utils.PollInterval). - Should(Succeed(), "Failed to create workspace %s", workspaceObj.Name) - }) - - By("Updating the tuning preset", func() { - updatedObj := workspaceObj - updatedObj.Tuning.Preset = updatedPresetSpec - // update workspace - Eventually(func() error { - return utils.TestingCluster.KubeClient.Update(ctx, updatedObj, &client.UpdateOptions{}) - }, utils.PollTimeout, utils.PollInterval). - Should(HaveOccurred(), "Failed to update workspace %s", updatedObj.Name) - }) - - By("Updating the Method", func() { - updatedObj := workspaceObj - updatedObj.Tuning.Method = alternativeTuningMethod - // update workspace - Eventually(func() error { - return utils.TestingCluster.KubeClient.Update(ctx, updatedObj, &client.UpdateOptions{}) - }, utils.PollTimeout, utils.PollInterval). - Should(HaveOccurred(), "Failed to update workspace %s", updatedObj.Name) - }) - - // delete workspace + //TODO custom template + + // delete workspace + Eventually(func() error { + return utils.TestingCluster.KubeClient.Delete(ctx, workspaceObj, &client.DeleteOptions{}) + }, utils.PollTimeout, utils.PollInterval).Should(Succeed(), "Failed to delete workspace") + + }) + + It("should validate the workspace tuning spec at update ", func() { + workspaceObj := utils.GenerateTuningWorkspaceManifest(fmt.Sprint("webhook-", rand.Intn(1000)), namespaceName, "", 1, "Standard_NC12s_v3", + &metav1.LabelSelector{ + MatchLabels: map[string]string{"kaito-workspace": "webhook-e2e-test"}, + }, nil, testDataSourceConfig, testDataDestinationConfig, initialPresetSpec, initialTuningMethod) + + By("Creating a valid tuning workspace", func() { + // Create workspace Eventually(func() error { - return utils.TestingCluster.KubeClient.Delete(ctx, workspaceObj, &client.DeleteOptions{}) - }, utils.PollTimeout, utils.PollInterval).Should(Succeed(), "Failed to delete workspace") + return utils.TestingCluster.KubeClient.Create(ctx, workspaceObj, &client.CreateOptions{}) + }, 20*time.Minute, utils.PollInterval). + Should(Succeed(), "Failed to create workspace %s", workspaceObj.Name) + }) + By("Updating the tuning preset", func() { + updatedObj := workspaceObj + updatedObj.Tuning.Preset = updatedPresetSpec + // update workspace + Eventually(func() error { + return utils.TestingCluster.KubeClient.Update(ctx, updatedObj, &client.UpdateOptions{}) + }, utils.PollTimeout, utils.PollInterval). + Should(HaveOccurred(), "Failed to update workspace %s", updatedObj.Name) }) - */ + + By("Updating the Method", func() { + updatedObj := workspaceObj + updatedObj.Tuning.Method = alternativeTuningMethod + // update workspace + Eventually(func() error { + return utils.TestingCluster.KubeClient.Update(ctx, updatedObj, &client.UpdateOptions{}) + }, utils.PollTimeout, utils.PollInterval). 
+ Should(HaveOccurred(), "Failed to update workspace %s", updatedObj.Name) + }) + + // delete workspace + Eventually(func() error { + return utils.TestingCluster.KubeClient.Delete(ctx, workspaceObj, &client.DeleteOptions{}) + }, utils.PollTimeout, utils.PollInterval).Should(Succeed(), "Failed to delete workspace") + + }) + It("should validate the workspace inference spec at update ", func() { workspaceObj := utils.GenerateInferenceWorkspaceManifest(fmt.Sprint("webhook-", rand.Intn(1000)), namespaceName, "", 1, "Standard_NC12s_v3", &metav1.LabelSelector{