Merge branch 'master' into streaming-deck-v2
Future-Outlier committed Jan 9, 2025
2 parents 137579f + 27c9edd commit 04f7fbc
Showing 224 changed files with 8,673 additions and 5,612 deletions.
95 changes: 94 additions & 1 deletion .github/workflows/single-binary.yml
@@ -10,7 +10,7 @@ on:
branches:
- master
- rc/*
- 'release-v**'
- "release-v**"
workflow_dispatch:

jobs:
@@ -197,6 +197,99 @@ jobs:
--version ${{ env.FLYTESNACKS_VERSION }} \
flytesnacks/$line;
done < flytesnacks/flyte_tests.txt
- name: Install Pytest
run: |
pip install pytest
- name: End2End
run: |
make end2end_execute
sandbox-bundled-offloaded-functional-tests:
runs-on: ubuntu-latest
env:
FLYTESNACKS_PRIORITIES: "P0"
FLYTESNACKS_VERSION: ""
timeout-minutes: 60
needs: [build-and-push-single-binary-image]
steps:
- name: Set latest Flytesnacks release
if: ${{ env.FLYTESNACKS_VERSION == '' }}
run: |
FLYTESNACKS_VERSION="$(curl --silent https://api.github.com/repos/flyteorg/flytesnacks/releases/latest | jq -r .tag_name)"
echo "FLYTESNACKS_VERSION=${FLYTESNACKS_VERSION}" >> ${GITHUB_ENV}
- name: Checkout
uses: actions/checkout@v4
- uses: actions/download-artifact@v4
with:
name: single-binary-image
path: docker/sandbox-bundled/images/tar
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
with:
driver-opts: image=moby/buildkit:master
buildkitd-flags: "--allow-insecure-entitlement security.insecure"
- name: Build sandbox image for functional tests
uses: docker/build-push-action@v6
with:
context: docker/sandbox-bundled
load: true
allow: "security.insecure"
tags: flyte-sandbox-bundled:local
# Without this, the GHA runner is under disk pressure and evicts all the pods.
# Buildx cache uses roughly 50% (7gb) of the GHA runner's disk (14gb).
- name: Prune Docker Buildx cache to reclaim storage
run: docker buildx prune --all --force
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: "3.12"
- uses: unionai/[email protected]
- name: Setup sandbox
run: |
mkdir -p ~/.flyte/sandbox
cat << EOF > ~/.flyte/sandbox/config.yaml
propeller:
literal-offloading-config:
enabled: true
min-size-in-mb-for-offloading: 0.000001
task_resources:
defaults:
cpu: "0"
memory: "0"
limits:
cpu: "0"
memory: "0"
EOF
flytectl demo start --image flyte-sandbox-bundled:local --imagePullPolicy Never
# By setting min-size-in-mb-for-offloading to 0.000001, we ensure that all tasks are offloaded.
- name: Install Python dependencies
run: |
python -m pip install --upgrade pip
pip install uv
uv pip install --system flytekit flytekitplugins-deck-standard "numpy<2.0.0" pyarrow pandas
uv pip freeze
- name: Checkout flytesnacks
uses: actions/checkout@v4
with:
repository: flyteorg/flytesnacks
path: flytesnacks
ref: ${{ env.FLYTESNACKS_VERSION }}
- name: Register specific tests
run: |
flytekit_version=$(pip show flytekit | grep -i version | awk '{ print $2 }')
while read -r line;
do
pyflyte -vv --config ./boilerplate/flyte/end2end/functional-test-config.yaml \
register \
--project flytesnacks \
--domain development \
--image cr.flyte.org/flyteorg/flytekit:py3.12-${flytekit_version} \
--version ${{ env.FLYTESNACKS_VERSION }} \
flytesnacks/$line;
done < flytesnacks/flyte_tests.txt
- name: Install Pytest
run: |
pip install pytest
- name: End2End
run: |
make end2end_execute
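
For context, the near-zero min-size-in-mb-for-offloading threshold configured in the sandbox setup above forces practically every task output onto blob storage instead of being inlined in the literal, which is what these offloaded functional tests are meant to exercise. A minimal sketch of the kind of workflow affected (illustrative only; the task and workflow below are not part of this diff):

```python
from typing import List

from flytekit import task, workflow


@task
def make_payload(n: int) -> List[int]:
    # Even a tiny output like this exceeds the ~1-byte offloading threshold,
    # so its literal is offloaded rather than embedded inline.
    return list(range(n))


@workflow
def offload_demo(n: int = 1000) -> List[int]:
    return make_payload(n=n)
```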
1 change: 1 addition & 0 deletions .gitignore
@@ -38,3 +38,4 @@ docs/examples
docs/_src
docs/_projects
docs/tests
empty-config.yaml
207 changes: 207 additions & 0 deletions CHANGELOG/CHANGELOG-v1.14.0.md

Large diffs are not rendered by default.

19 changes: 19 additions & 0 deletions CHANGELOG/CHANGELOG-v1.14.1.md
@@ -0,0 +1,19 @@
# Flyte 1.14.1 Release Notes

* Update flytestdlib and affected tools (copilot) for missing config.

## What's Changed
* docs: Refactor merge sort code example to use literalinclude by @davidlin20dev in https://github.com/flyteorg/flyte/pull/6091
* [DOCS] Using ImageSpec in ContainerTask by @machichima in https://github.com/flyteorg/flyte/pull/6095
* Eager doc updates by @wild-endeavor in https://github.com/flyteorg/flyte/pull/6099
* Revert "fix: return the config file not found error" by @eapolinario in https://github.com/flyteorg/flyte/pull/6100
* Remove notes on deprecated Batch size by @wild-endeavor in https://github.com/flyteorg/flyte/pull/6102
* Upstream: Add labels to published execution events by @katrogan in https://github.com/flyteorg/flyte/pull/6104
* Fix: Make distributed error aggregation opt-in by @fg91 in https://github.com/flyteorg/flyte/pull/6103
* Add default labels and annotations to Ray worker pods too. by @katrogan in https://github.com/flyteorg/flyte/pull/6107
* Fix: Remove the default search dialog if it exists (on CMD + K) by @chmod77 in https://github.com/flyteorg/flyte/pull/6106

## New Contributors
* @chmod77 made their first contribution in https://github.com/flyteorg/flyte/pull/6106

**Full Changelog**: https://github.com/flyteorg/flyte/compare/v1.14.0...v1.14.1
4 changes: 2 additions & 2 deletions README.md

Large diffs are not rendered by default.

8 changes: 6 additions & 2 deletions boilerplate/flyte/end2end/Makefile
@@ -7,8 +7,12 @@
end2end_execute: export FLYTESNACKS_PRIORITIES ?= P0
end2end_execute: export FLYTESNACKS_VERSION ?= $(shell curl --silent "https://api.github.com/repos/flyteorg/flytesnacks/releases/latest" | jq -r .tag_name)
end2end_execute:
./boilerplate/flyte/end2end/end2end.sh ./boilerplate/flyte/end2end/functional-test-config.yaml --return_non_zero_on_failure

pytest ./boilerplate/flyte/end2end/test_run.py \
--flytesnacks_release_tag=$(FLYTESNACKS_VERSION) \
--priorities=$(FLYTESNACKS_PRIORITIES) \
--config_file=./boilerplate/flyte/end2end/functional-test-config.yaml \
--return_non_zero_on_failure

.PHONY: k8s_integration_execute
k8s_integration_execute:
echo "pass"
47 changes: 47 additions & 0 deletions boilerplate/flyte/end2end/conftest.py
@@ -0,0 +1,47 @@
import pytest

def pytest_addoption(parser):
parser.addoption("--flytesnacks_release_tag", required=True)
parser.addoption("--priorities", required=True)
parser.addoption("--config_file", required=True)
parser.addoption(
"--return_non_zero_on_failure",
action="store_true",
default=False,
help="Return a non-zero exit status if any workflow fails",
)
parser.addoption(
"--terminate_workflow_on_failure",
action="store_true",
default=False,
help="Abort failing workflows upon exit",
)
parser.addoption(
"--test_project_name",
default="flytesnacks",
help="Name of project to run functional tests on"
)
parser.addoption(
"--test_project_domain",
default="development",
help="Name of domain in project to run functional tests on"
)
parser.addoption(
"--cluster_pool_name",
required=False,
type=str,
default=None,
)

@pytest.fixture
def setup_flytesnacks_env(pytestconfig):
return {
"flytesnacks_release_tag": pytestconfig.getoption("--flytesnacks_release_tag"),
"priorities": pytestconfig.getoption("--priorities"),
"config_file": pytestconfig.getoption("--config_file"),
"return_non_zero_on_failure": pytestconfig.getoption("--return_non_zero_on_failure"),
"terminate_workflow_on_failure": pytestconfig.getoption("--terminate_workflow_on_failure"),
"test_project_name": pytestconfig.getoption("--test_project_name"),
"test_project_domain": pytestconfig.getoption("--test_project_domain"),
"cluster_pool_name": pytestconfig.getoption("--cluster_pool_name"),
}
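
The setup_flytesnacks_env fixture above simply repackages the registered command-line options into a dict. A minimal, hypothetical consumer (the real consumer is the pytest-based test_run.py further down in this diff):

```python
# Hypothetical example test; placed next to the conftest.py above so the
# fixture and the custom pytest options are picked up automatically.
def test_options_are_exposed(setup_flytesnacks_env):
    env = setup_flytesnacks_env
    # Required options are always present; optional ones fall back to their defaults.
    assert env["flytesnacks_release_tag"]
    assert env["test_project_name"]    # defaults to "flytesnacks"
    assert env["test_project_domain"]  # defaults to "development"
```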
12 changes: 0 additions & 12 deletions boilerplate/flyte/end2end/end2end.sh

This file was deleted.

boilerplate/flyte/end2end/test_run.py
@@ -5,7 +5,7 @@
import traceback
from typing import Dict, List, Optional

import click
import pytest
import requests
from flytekit.configuration import Config
from flytekit.models.core.execution import WorkflowExecutionPhase
@@ -15,7 +15,6 @@
WAIT_TIME = 10
MAX_ATTEMPTS = 200


def execute_workflow(
remote: FlyteRemote,
version,
@@ -27,7 +26,6 @@ def execute_workflow(
wf = remote.fetch_workflow(name=workflow_name, version=version)
return remote.execute(wf, inputs=inputs, wait=False, cluster_pool=cluster_pool_name)


def executions_finished(
executions_by_wfgroup: Dict[str, List[FlyteWorkflowExecution]]
) -> bool:
@@ -36,7 +34,6 @@ def executions_finished(
return False
return True


def sync_executions(
remote: FlyteRemote, executions_by_wfgroup: Dict[str, List[FlyteWorkflowExecution]]
):
@@ -50,13 +47,11 @@ def sync_executions(
print("GOT TO THE EXCEPT")
print("COUNT THIS!")


def report_executions(executions_by_wfgroup: Dict[str, List[FlyteWorkflowExecution]]):
for executions in executions_by_wfgroup.values():
for execution in executions:
print(execution)


def schedule_workflow_groups(
tag: str,
workflow_groups: List[str],
@@ -65,10 +60,6 @@ def schedule_workflow_groups(
parsed_manifest: List[dict],
cluster_pool_name: Optional[str] = None,
) -> Dict[str, bool]:
"""
Schedule workflows executions for all workflow groups and return True if all executions succeed, otherwise
return False.
"""
executions_by_wfgroup = {}
# Schedule executions for each workflow group,
for wf_group in workflow_groups:
@@ -120,30 +111,32 @@ def schedule_workflow_groups(
results[wf_group] = len(non_succeeded_executions) == 0
return results


def valid(workflow_group, parsed_manifest):
"""
Return True if a workflow group is contained in parsed_manifest,
False otherwise.
"""
return workflow_group in set(wf_group["name"] for wf_group in parsed_manifest)

def test_run(setup_flytesnacks_env):

env = setup_flytesnacks_env

flytesnacks_release_tag = env["flytesnacks_release_tag"]
priorities = env["priorities"]
config_file_path = env["config_file"]
terminate_workflow_on_failure = env["terminate_workflow_on_failure"]
test_project_name = env["test_project_name"]
test_project_domain = env["test_project_domain"]
cluster_pool_name = env["cluster_pool_name"]
return_non_zero_on_failure = env["return_non_zero_on_failure"]

def run(
flytesnacks_release_tag: str,
priorities: List[str],
config_file_path,
terminate_workflow_on_failure: bool,
test_project_name: str,
test_project_domain: str,
cluster_pool_name: Optional[str] = None,
) -> List[Dict[str, str]]:
remote = FlyteRemote(
Config.auto(config_file=config_file_path),
test_project_name,
test_project_domain,
)

# For a given release tag and priority, this function filters the workflow groups from the flytesnacks
# manifest file. For example, for the release tag "v0.2.224" and the priority "P0" it returns [ "core" ].
manifest_url = (
@@ -210,75 +203,15 @@ def run(
"color": background_color,
}
results.append(result)
return results


@click.command()
@click.argument("flytesnacks_release_tag")
@click.argument("priorities")
@click.argument("config_file")
@click.option(
"--return_non_zero_on_failure",
default=False,
is_flag=True,
help="Return a non-zero exit status if any workflow fails",
)
@click.option(
"--terminate_workflow_on_failure",
default=False,
is_flag=True,
help="Abort failing workflows upon exit",
)
@click.option(
"--test_project_name",
default="flytesnacks",
type=str,
is_flag=False,
help="Name of project to run functional tests on",
)
@click.option(
"--test_project_domain",
default="development",
type=str,
is_flag=False,
help="Name of domain in project to run functional tests on",
)
@click.argument(
"cluster_pool_name",
required=False,
type=str,
default=None,
)
def cli(
flytesnacks_release_tag,
priorities,
config_file,
return_non_zero_on_failure,
terminate_workflow_on_failure,
test_project_name,
test_project_domain,
cluster_pool_name,
):
print(f"return_non_zero_on_failure={return_non_zero_on_failure}")
results = run(
flytesnacks_release_tag,
priorities,
config_file,
terminate_workflow_on_failure,
test_project_name,
test_project_domain,
cluster_pool_name,
)

# Write a json object in its own line describing the result of this run to stdout
print(f"Result of run:\n{json.dumps(results)}")

# Return a non-zero exit code if core fails
if return_non_zero_on_failure:
for result in results:
if result["status"] not in ("passing", "coming soon"):
sys.exit(1)


if __name__ == "__main__":
cli()
fail_results = [result for result in results if result["status"] not in ("passing", "coming soon")]
if fail_results:
fail_msgs = [
f"Workflow '{r['label']}' failed with status '{r['status']}'" for r in fail_results
]
pytest.fail("\n".join(fail_msgs))

assert results == [{"label": "core", "status": "passing", "color": "green"}]
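
With the click CLI removed, the end-to-end run is driven entirely by pytest; the Makefile change above shows the CI invocation. A sketch of an equivalent local invocation via pytest.main (the release tag and relative paths are assumptions; adjust them to your checkout):

```python
import sys

import pytest

if __name__ == "__main__":
    sys.exit(
        pytest.main(
            [
                "boilerplate/flyte/end2end/test_run.py",
                "-vv",
                "--flytesnacks_release_tag=v1.0.0",  # assumed tag; use the flytesnacks release you want to test
                "--priorities=P0",
                "--config_file=boilerplate/flyte/end2end/functional-test-config.yaml",
                "--return_non_zero_on_failure",
            ]
        )
    )
```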