Merge branch 'main' into global-concurrency-tags
abrookins authored Jul 24, 2024
2 parents 5a9152a + 09b7a48 commit 782026c
Showing 24 changed files with 1,026 additions and 324 deletions.
14 changes: 7 additions & 7 deletions .github/dependabot.yml
@@ -5,30 +5,30 @@ updates:
directory: "/"
schedule:
interval: "daily"
labels: ["maintenance"]
labels: ["development"]

- package-ecosystem: "npm"
directory: '/ui/'
schedule:
interval: "weekly"
labels: ["ui", "maintenance"]
labels: ["ui", "development"]

- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "daily"
labels: ["maintenance"]
labels: ["development"]

- package-ecosystem: "docker"
directory: "/"
schedule:
interval: "daily"
labels: ["maintenance"]
labels: ["development"]

- # Check for Python updates in v1
+ # Check for Python updates in v2
- package-ecosystem: "pip"
- target-branch: "1.x"
+ target-branch: "2.x"
directory: "/"
schedule:
interval: "daily"
labels: ["maintenance", "v1"]
labels: ["development", "2.x"]
2 changes: 1 addition & 1 deletion .github/workflows/npm_update_latest_prefect.yaml
@@ -59,7 +59,7 @@
--title "Update @${{ inputs.package_name }} to version ${{ inputs.package_version }}" \
--body "Update @${{ inputs.package_name }} to version ${{ inputs.package_version }}.
Release information can be found at https://github.com/${{ inputs.package_name }}/releases/tag/${{ inputs.package_version }}." \
- --label maintenance \
+ --label development \
--label ui
env:
GITHUB_TOKEN: ${{ secrets.PREFECT_CONTENTS_PR_RW }}
135 changes: 0 additions & 135 deletions .github/workflows/python-tests.yaml
@@ -219,141 +219,6 @@ jobs:
env:
SLACK_WEBHOOK_URL: ${{ secrets.ENGINEERING_REVIEW_SLACK_WEBHOOK_URL }}

run-tests-for-datadog:
name: DataDog CI Visibility
if: github.event_name == 'push' && (github.ref == 'refs/heads/main' || contains(github.event.head_commit.message, '.github/workflows/python-tests.yaml'))
runs-on:
group: oss-larger-runners
strategy:
matrix:
database:
- "postgres:14"
python-version:
- "3.12"

fail-fast: true

timeout-minutes: 45

steps:
- name: Display current test matrix
run: echo '${{ toJSON(matrix) }}'

- uses: actions/checkout@v4
with:
persist-credentials: false
fetch-depth: 0

- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
with:
driver-opts: image=moby/buildkit:v0.12.5

- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v5
id: setup_python
with:
python-version: ${{ matrix.python-version }}

- name: UV Cache
# Manually cache the uv cache directory
# until setup-python supports it:
# https://github.com/actions/setup-python/issues/822
uses: actions/cache@v4
id: cache-uv
with:
path: ~/.cache/uv
key: uvcache-${{ runner.os }}-${{ steps.setup_python.outputs.python-version }}-${{ hashFiles('requirements-client.txt', 'requirements.txt', 'requirements-dev.txt') }}

- name: Get image tag
id: get_image_tag
run: |
SHORT_SHA=$(git rev-parse --short=7 HEAD)
tmp="sha-$SHORT_SHA-python${{ matrix.python-version }}"
echo "image_tag=${tmp}" >> $GITHUB_OUTPUT
- name: Build test image
uses: docker/build-push-action@v6
with:
context: .
build-args: |
PYTHON_VERSION=${{ matrix.python-version }}
PREFECT_EXTRAS=[dev]
tags: prefecthq/prefect-dev:${{ steps.get_image_tag.outputs.image_tag }}
outputs: type=docker,dest=/tmp/image.tar

- name: Test Docker image
run: |
docker load --input /tmp/image.tar
docker run --rm prefecthq/prefect-dev:${{ steps.get_image_tag.outputs.image_tag }} prefect version
- name: Install packages
run: |
python -m pip install -U uv
uv pip install --upgrade --system -e .[dev]
- name: Start database container
run: >
docker run
--name "postgres"
--detach
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
--publish 5432:5432
--tmpfs /var/lib/postgresql/data
--env POSTGRES_USER="prefect"
--env POSTGRES_PASSWORD="prefect"
--env POSTGRES_DB="prefect"
--env LANG="C.UTF-8"
--env LANGUAGE="C.UTF-8"
--env LC_ALL="C.UTF-8"
--env LC_COLLATE="C.UTF-8"
--env LC_CTYPE="C.UTF-8"
${{ matrix.database }}
-c max_connections=250
./scripts/wait-for-healthy-container.sh postgres 30
echo "PREFECT_API_DATABASE_CONNECTION_URL=postgresql+asyncpg://prefect:prefect@localhost/prefect" >> $GITHUB_ENV
- name: Start docker registry
run: >
docker run
--name "prefect-test-registry"
--detach
--publish 5555:5000
registry:2
- name: Start redis
run: >
docker run
--name "redis"
--detach
--publish 6379:6379
redis:latest
- name: Run tests
env:
PREFECT_EXPERIMENTAL_ENABLE_PYDANTIC_V2_INTERNALS: "1"
DD_CIVISIBILITY_AGENTLESS_ENABLED: true
DD_API_KEY: ${{ secrets.DD_API_KEY_CI_VISIBILITY }}
DD_SITE: datadoghq.com
DD_ENV: ci
DD_SERVICE: prefect
run: >
pytest tests
--numprocesses auto
--maxprocesses 6
--ddtrace
--dist worksteal
--disable-docker-image-builds
--exclude-service kubernetes
--durations 26
--cov
--cov-config setup.cfg
run-docker-tests:
runs-on:
group: oss-larger-runners
12 changes: 12 additions & 0 deletions docs/3.0rc/api-ref/rest-api/server/schema.json
@@ -16000,6 +16000,18 @@
"type": "object",
"title": "Parameters"
},
"enforce_parameter_schema": {
"anyOf": [
{
"type": "boolean"
},
{
"type": "null"
}
],
"title": "Enforce Parameter Schema",
"description": "Whether or not to enforce the parameter schema on this run."
},
"context": {
"type": "object",
"title": "Context"
10 changes: 5 additions & 5 deletions docs/3.0rc/develop/results.mdx
@@ -79,9 +79,9 @@ Any settings _explicitly_ set on a task take precedence over the flow settings.
### Result storage

You can configure the system of record for your results through the `result_storage` keyword argument.
- This keyword accepts a [filesystem block](/3.0rc/develop/blocks/) or a block document slug.
+ This keyword accepts an instantiated [filesystem block](/3.0rc/develop/blocks/), or a block slug. Find your blocks' slugs with `prefect block ls`.
Note that if you want your tasks to share a common cache, your result storage should be accessible by
- the infrastructure in which those tasks run.
+ the infrastructure in which those tasks run. [Integrations](/integrations/integrations) have cloud-specific storage blocks.
For example, a common distributed filesystem for result storage is AWS S3.

```python
@@ -104,7 +104,7 @@ unpersisted_task = my_task.with_options(persist_result=False)
other_storage_task = my_task.with_options(result_storage=test_block)


- @flow(result_storage='s3/my-dev-bucket')
+ @flow(result_storage='s3-bucket/my-dev-block')
def my_flow():

# this task will use the flow's result storage
Expand All @@ -113,7 +113,7 @@ def my_flow():
# this task will not persist results at all
unpersisted_task()

- # this task will persist results to its own test bucket using a different S3 block
+ # this task will persist results to its own bucket using a different S3 block
other_storage_task()
```

@@ -125,7 +125,7 @@ Specifying a block document slug here will enable result persistence using that block.
For example:

```bash
- prefect config set PREFECT_DEFAULT_RESULT_STORAGE_BLOCK='s3/my-prod-bucket'
+ prefect config set PREFECT_DEFAULT_RESULT_STORAGE_BLOCK='s3-bucket/my-prod-block'
```
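For illustration only: the `s3-bucket/my-prod-block` slug shown above is the kind of slug produced by saving an `S3Bucket` block from the `prefect-aws` integration. A minimal sketch, assuming `prefect-aws` is installed, AWS credentials are available in the environment, and the bucket name is a placeholder:

```python
# Sketch: create a block whose slug matches the docs example above.
# Assumes prefect-aws is installed; "my-prod-bucket" is a placeholder bucket name.
from prefect import flow, task
from prefect_aws.s3 import S3Bucket

# Saving under the name "my-prod-block" yields the slug "s3-bucket/my-prod-block".
S3Bucket(bucket_name="my-prod-bucket").save("my-prod-block", overwrite=True)


@task(persist_result=True)
def add(x: int, y: int) -> int:
    return x + y


@flow(result_storage="s3-bucket/my-prod-block")
def my_flow() -> int:
    # the task's persisted result is written to the S3 bucket configured above
    return add(1, 2)


if __name__ == "__main__":
    my_flow()
```

Running `prefect block ls` afterwards should list the saved block under the `s3-bucket` type, matching the slug passed to `result_storage`.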

<Info>
2 changes: 1 addition & 1 deletion requirements-client.txt
@@ -3,7 +3,7 @@ asgi-lifespan >= 1.0, < 3.0
cachetools >= 5.3, < 6.0
cloudpickle >= 2.0, < 4.0
coolname >= 1.0.4, < 3.0.0
- croniter >= 1.0.12, < 3.0.0
+ croniter >= 1.0.12, < 4.0.0
exceptiongroup >= 1.0.0
fastapi >= 0.111.0, < 1.0.0
fsspec >= 2022.5.0
1 change: 0 additions & 1 deletion requirements-dev.txt
@@ -1,7 +1,6 @@
ruff
cairosvg
codespell>=2.2.6
ddtrace
ipython
jinja2
moto >= 5
31 changes: 28 additions & 3 deletions src/integrations/prefect-email/prefect_email/credentials.py
@@ -9,6 +9,9 @@
from pydantic import Field, SecretStr, field_validator

from prefect.blocks.core import Block
+ from prefect.logging.loggers import get_logger, get_run_logger
+
+ internal_logger = get_logger(__name__)


class SMTPType(Enum):
@@ -82,6 +85,7 @@ class EmailServerCredentials(Block):
keys from the built-in SMTPServer Enum members, like "gmail".
smtp_type: Either "SSL", "STARTTLS", or "INSECURE".
smtp_port: If provided, overrides the smtp_type's default port number.
+ verify: If `False`, SSL certificates will not be verified. Default to `True`.
Example:
Load stored email server credentials:
@@ -128,6 +132,13 @@ class EmailServerCredentials(Block):
title="SMTP Port",
)

+ verify: Optional[bool] = Field(
+     default=True,
+     description=(
+         "If `False`, SSL certificates will not be verified. Default to `True`."
+     ),
+ )

@field_validator("smtp_server", mode="before")
def _cast_smtp_server(cls, value: str):
"""
@@ -182,13 +193,27 @@ def example_get_server_flow():
if smtp_type == SMTPType.INSECURE:
server = SMTP(smtp_server, smtp_port)
else:
- context = ssl.create_default_context()
+ context = (
+     ssl.create_default_context()
+     if self.verify
+     else ssl._create_unverified_context(protocol=ssl.PROTOCOL_TLS_CLIENT)
+ )
if smtp_type == SMTPType.SSL:
server = SMTP_SSL(smtp_server, smtp_port, context=context)
elif smtp_type == SMTPType.STARTTLS:
server = SMTP(smtp_server, smtp_port)
server.starttls(context=context)
- if self.username is not None:
-     server.login(self.username, self.password.get_secret_value())
+ if self.username is not None:
+     if not self.verify or smtp_type == SMTPType.INSECURE:
+         try:
+             logger = get_run_logger()
+         except Exception:
+             logger = internal_logger
+         logger.warning(
+             """SMTP login is not secure without a verified SSL/TLS or SECURE connection.
+             Without such a connection, the password may be sent in plain text,
+             making it vulnerable to interception."""
+         )
+     server.login(self.username, self.password.get_secret_value())

return server
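A hedged usage sketch of the new `verify` flag added above; the SMTP host and credentials are placeholders, not values from this commit:

```python
# Sketch: opting out of certificate verification with the new `verify` field.
# The server address and credentials below are placeholders.
from prefect_email import EmailServerCredentials, SMTPType

credentials = EmailServerCredentials(
    smtp_server="smtp.example.com",  # placeholder host
    smtp_type=SMTPType.STARTTLS,
    username="username",
    password="password",
    verify=False,  # skip SSL certificate verification; login emits the warning above
)
server = credentials.get_server()  # returns a logged-in smtplib.SMTP connection
server.quit()
```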
18 changes: 18 additions & 0 deletions src/integrations/prefect-email/tests/test_credentials.py
@@ -91,3 +91,21 @@ def test_email_service_credentials_roundtrip_smtp_type_enum(smtp, smtp_type):
assert credentials.smtp_type == SMTPType.STARTTLS
server = credentials.get_server()
assert server.port == 587


@pytest.mark.parametrize("smtp_type", [SMTPType.STARTTLS, "STARTTLS", 587])
@pytest.mark.parametrize("verify", [False])
def test_email_service_credentials_unverified_context(smtp, smtp_type, verify):
email_server_credentials = EmailServerCredentials(
smtp_server="us-smtp-outbound-1.mimecast.com",
smtp_type=smtp_type,
username="username",
password="password",
verify=verify,
)
email_server_credentials.save("email-credentials", overwrite=True)
credentials = EmailServerCredentials.load("email-credentials")
assert credentials.smtp_type == SMTPType.STARTTLS
assert credentials.verify is False
server = credentials.get_server()
assert server.port == 587
4 changes: 4 additions & 0 deletions src/prefect/client/schemas/actions.py
@@ -377,6 +377,10 @@ class DeploymentFlowRunCreate(ActionBaseModel):
parameters: Dict[str, Any] = Field(
default_factory=dict, description="The parameters for the flow run."
)
enforce_parameter_schema: Optional[bool] = Field(
default=None,
description="Whether or not to enforce the parameter schema on this run.",
)
context: Dict[str, Any] = Field(
default_factory=dict, description="The context for the flow run."
)
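A minimal sketch of the new field on the client action model, assuming the model's other fields keep their defaults; the parameter values are purely illustrative:

```python
# Sketch: the new enforce_parameter_schema field on the flow-run-creation action.
from prefect.client.schemas.actions import DeploymentFlowRunCreate

run_create = DeploymentFlowRunCreate(
    parameters={"name": "Marvin"},   # illustrative parameters
    enforce_parameter_schema=False,  # opt this run out of parameter schema validation
)
print(run_create.enforce_parameter_schema)  # False; None (the default) defers to the deployment
```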
2 changes: 1 addition & 1 deletion src/prefect/flows.py
@@ -798,7 +798,7 @@ def serve(
cron: Optional[Union[Iterable[str], str]] = None,
rrule: Optional[Union[Iterable[str], str]] = None,
paused: Optional[bool] = None,
- schedules: Optional[List["FlexibleScheduleList"]] = None,
+ schedules: Optional["FlexibleScheduleList"] = None,
schedule: Optional[SCHEDULE_TYPES] = None,
is_schedule_active: Optional[bool] = None,
triggers: Optional[List[Union[DeploymentTriggerTypes, TriggerTypes]]] = None,
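The annotation change above drops the extra `List[...]` wrapper around `FlexibleScheduleList`; callers still pass a single list of schedules. A hedged sketch of that call shape (the flow, deployment name, and schedules are illustrative):

```python
# Sketch: serving a flow with a list of schedules, matching the updated annotation.
from datetime import timedelta

from prefect import flow
from prefect.client.schemas.schedules import CronSchedule, IntervalSchedule


@flow
def healthcheck() -> str:
    return "ok"


if __name__ == "__main__":
    # serve() blocks and runs the deployment locally on the given schedules
    healthcheck.serve(
        name="healthcheck-deployment",
        schedules=[
            CronSchedule(cron="0 9 * * *", timezone="UTC"),
            IntervalSchedule(interval=timedelta(hours=6)),
        ],
    )
```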