Skip to content

Commit

Permalink
Merge branch 'main' into florence2-aliases
Browse files Browse the repository at this point in the history
  • Loading branch information
probicheaux committed Jul 19, 2024
2 parents c55cbcf + 09dde63 commit 376437f
Show file tree
Hide file tree
Showing 94 changed files with 8,731 additions and 258 deletions.
9 changes: 9 additions & 0 deletions .github/workflows/load_test_hosted_inference.yml
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@ on:
- object-detection
- instance-segmentation
- classification
- workflows

jobs:
build:
Expand Down Expand Up @@ -49,6 +50,10 @@ jobs:
if: ${{ github.event.inputs.environment == 'production' && github.event.inputs.model_type == 'classification' }}
run: |
ROBOFLOW_API_KEY=${{ secrets.LOAD_TEST_PRODUCTION_API_KEY }} python -m inference_cli.main benchmark api-speed -m vehicle-classification-eapcd/2 -d coco -rps 5 -br 500 -h https://classify.roboflow.com --yes --output_location test_results.json
- name: 🏋️‍♂️ Load test 🚨 PRODUCTION 🚨 | workflows 🔥🔥🔥🔥
if: ${{ github.event.inputs.environment == 'production' && github.event.inputs.model_type == 'workflows' }}
run: |
ROBOFLOW_API_KEY=${{ secrets.LOAD_TEST_PRODUCTION_API_KEY }} python -m inference_cli.main benchmark api-speed -wid workflows-production-test -wn paul-guerrie-tang1 -d coco -rps 5 -br 500 -h https://classify.roboflow.com --yes --output_location test_results.json
- name: 🏋️‍♂️ Load test 😎 STAGING 😎 | object-detection 🔥🔥🔥🔥
if: ${{ github.event.inputs.environment == 'staging' && github.event.inputs.model_type == 'object-detection' }}
Expand All @@ -62,5 +67,9 @@ jobs:
if: ${{ github.event.inputs.environment == 'staging' && github.event.inputs.model_type == 'classification' }}
run: |
ROBOFLOW_API_KEY=${{ secrets.LOAD_TEST_STAGING_API_KEY }} python -m inference_cli.main benchmark api-speed -m catdog/28 -d coco -rps 5 -br 500 -h https://lambda-classification.staging.roboflow.com --legacy-endpoints --yes --output_location test_results.json
- name: 🏋️‍♂️ Load test 😎 STAGING 😎 | workflows 🔥🔥🔥🔥
if: ${{ github.event.inputs.environment == 'staging' && github.event.inputs.model_type == 'workflows' }}
run: |
ROBOFLOW_API_KEY=${{ secrets.LOAD_TEST_STAGING_API_KEY }} python -m inference_cli.main benchmark api-speed -wid workflows-staging-test -wn paul-guerrie -d coco -rps 5 -br 500 -h https://lambda-classification.staging.roboflow.com --legacy-endpoints --yes --output_location test_results.json
- name: 📈 RESULTS
run: cat test_results.json | jq
2 changes: 1 addition & 1 deletion development/docs/build_block_docs.py
Original file line number Diff line number Diff line change
Expand Up @@ -104,7 +104,7 @@ def main() -> None:
token=AUTOGENERATED_BLOCKS_LIST_TOKEN,
)
block_card_lines = []
blocks_description = describe_available_blocks()
blocks_description = describe_available_blocks(dynamic_blocks=[])
block_type2manifest_type_identifier = {
block.block_class: block.manifest_type_identifier
for block in blocks_description.blocks
Expand Down
1 change: 1 addition & 0 deletions docker/dockerfiles/Dockerfile.onnx.lambda
Original file line number Diff line number Diff line change
Expand Up @@ -70,6 +70,7 @@ ENV API_LOGGING_ENABLED=True
ENV MODEL_VALIDATION_DISABLED=True
ENV ALLOW_NON_HTTPS_URL_INPUT=False
ENV ALLOW_URL_INPUT_WITHOUT_FQDN=False
ENV ALLOW_CUSTOM_PYTHON_EXECUTION_IN_WORKFLOWS=False

WORKDIR ${LAMBDA_TASK_ROOT}
RUN rm -rf /build
Expand Down
1 change: 1 addition & 0 deletions docker/dockerfiles/Dockerfile.onnx.lambda.slim
Original file line number Diff line number Diff line change
Expand Up @@ -64,6 +64,7 @@ ENV API_LOGGING_ENABLED=True
ENV MODEL_VALIDATION_DISABLED=True
ENV ALLOW_NON_HTTPS_URL_INPUT=False
ENV ALLOW_URL_INPUT_WITHOUT_FQDN=False
ENV ALLOW_CUSTOM_PYTHON_EXECUTION_IN_WORKFLOWS=False

WORKDIR ${LAMBDA_TASK_ROOT}

Expand Down
1 change: 1 addition & 0 deletions docs/workflows/blocks.md
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,7 @@ hide:
<p class="card block-card" data-url="continue_if" data-name="ContinueIf" data-desc="Stops execution of processing branch under certain condition" data-labels="FLOW_CONTROL, APACHE-2.0" data-author=""></p>
<p class="card block-card" data-url="perspective_correction" data-name="PerspectiveCorrection" data-desc="Correct coordinates of detections from plane defined by given polygon to straight rectangular plane of given width and height" data-labels="TRANSFORMATION, APACHE-2.0" data-author=""></p>
<p class="card block-card" data-url="dynamic_zone" data-name="DynamicZone" data-desc="Simplify polygons so they are geometrically convex and simplify them to contain only requested amount of vertices" data-labels="TRANSFORMATION, APACHE-2.0" data-author=""></p>
<p class="card block-card" data-url="custom_python" data-name="CustomPython" data-desc="" data-labels=", " data-author=""></p>
<!--- AUTOGENERATED_BLOCKS_LIST -->
</div>
</div>
Expand Down
28 changes: 14 additions & 14 deletions docs/workflows/kinds.md
Original file line number Diff line number Diff line change
Expand Up @@ -8,26 +8,26 @@ resolved we need a simple type system - that's what we call `kinds`.

## List of `workflows` kinds
<!--- AUTOGENERATED_KINDS_LIST -->
* [`roboflow_project`](/workflows/kinds/roboflow_project): Roboflow project name
* [`dictionary`](/workflows/kinds/dictionary): Dictionary
* [`string`](/workflows/kinds/string): String value
* [`list_of_values`](/workflows/kinds/list_of_values): List of values of any types
* [`*`](/workflows/kinds/*): Equivalent of any element
* [`Batch[dictionary]`](/workflows/kinds/batch_dictionary): Batch of dictionaries
* [`Batch[keypoint_detection_prediction]`](/workflows/kinds/batch_keypoint_detection_prediction): `'predictions'` key from Keypoint Detection Model output
* [`Batch[parent_id]`](/workflows/kinds/batch_parent_id): Identifier of parent for step output
* [`Batch[classification_prediction]`](/workflows/kinds/batch_classification_prediction): `'predictions'` key from Classification Model outputs
* [`roboflow_model_id`](/workflows/kinds/roboflow_model_id): Roboflow model id
* [`Batch[top_class]`](/workflows/kinds/batch_top_class): Batch of string values representing top class predicted by classification model
* [`integer`](/workflows/kinds/integer): Integer value
* [`dictionary`](/workflows/kinds/dictionary): Dictionary
* [`Batch[classification_prediction]`](/workflows/kinds/batch_classification_prediction): `'predictions'` key from Classification Model outputs
* [`Batch[boolean]`](/workflows/kinds/batch_boolean): Boolean values batch
* [`boolean`](/workflows/kinds/boolean): Boolean flag
* [`Batch[prediction_type]`](/workflows/kinds/batch_prediction_type): String value with type of prediction
* [`Batch[parent_id]`](/workflows/kinds/batch_parent_id): Identifier of parent for step output
* [`string`](/workflows/kinds/string): String value
* [`Batch[instance_segmentation_prediction]`](/workflows/kinds/batch_instance_segmentation_prediction): `'predictions'` key from Instance Segmentation Model outputs
* [`*`](/workflows/kinds/*): Equivalent of any element
* [`integer`](/workflows/kinds/integer): Integer value
* [`float_zero_to_one`](/workflows/kinds/float_zero_to_one): `float` value in range `[0.0, 1.0]`
* [`Batch[image_metadata]`](/workflows/kinds/batch_image_metadata): Dictionary with image metadata required by supervision
* [`Batch[bar_code_detection]`](/workflows/kinds/batch_bar_code_detection): Prediction with barcode detection
* [`Batch[image]`](/workflows/kinds/batch_image): Image in workflows
* [`roboflow_project`](/workflows/kinds/roboflow_project): Roboflow project name
* [`Batch[string]`](/workflows/kinds/batch_string): Batch of string values
* [`list_of_values`](/workflows/kinds/list_of_values): List of values of any types
* [`Batch[boolean]`](/workflows/kinds/batch_boolean): Boolean values batch
* [`Batch[object_detection_prediction]`](/workflows/kinds/batch_object_detection_prediction): `'predictions'` key from Object Detection Model output
* [`float_zero_to_one`](/workflows/kinds/float_zero_to_one): `float` value in range `[0.0, 1.0]`
* [`Batch[prediction_type]`](/workflows/kinds/batch_prediction_type): String value with type of prediction
* [`Batch[keypoint_detection_prediction]`](/workflows/kinds/batch_keypoint_detection_prediction): `'predictions'` key from Keypoint Detection Model output
* [`Batch[bar_code_detection]`](/workflows/kinds/batch_bar_code_detection): Prediction with barcode detection
* [`roboflow_model_id`](/workflows/kinds/roboflow_model_id): Roboflow model id
<!--- AUTOGENERATED_KINDS_LIST -->
10 changes: 10 additions & 0 deletions inference/core/entities/requests/workflows.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,10 @@

from pydantic import BaseModel, Field

from inference.core.workflows.execution_engine.dynamic_blocks.entities import (
DynamicBlockDefinition,
)


class WorkflowInferenceRequest(BaseModel):
api_key: str = Field(
Expand All @@ -18,3 +22,9 @@ class WorkflowInferenceRequest(BaseModel):

class WorkflowSpecificationInferenceRequest(WorkflowInferenceRequest):
specification: dict


class DescribeBlocksRequest(BaseModel):
    """Request payload for the blocks-description endpoint.

    Carries the list of user-supplied dynamic block definitions that should be
    compiled and included alongside the statically registered workflow blocks
    when describing what is available. Defaults to an empty list, i.e. only
    built-in blocks are described.
    """

    dynamic_blocks_definitions: List[DynamicBlockDefinition] = Field(
        default_factory=list, description="Dynamic blocks to be used."
    )
3 changes: 3 additions & 0 deletions inference/core/entities/responses/workflows.py
Original file line number Diff line number Diff line change
Expand Up @@ -146,3 +146,6 @@ class WorkflowsBlocksDescription(BaseModel):
universal_query_language_description: UniversalQueryLanguageDescription = Field(
description="Definitions of Universal Query Language operations and operators"
)
dynamic_block_definition_schema: dict = Field(
description="Schema for dynamic block definition"
)
3 changes: 3 additions & 0 deletions inference/core/env.py
Original file line number Diff line number Diff line change
Expand Up @@ -393,6 +393,9 @@
WORKFLOWS_REMOTE_EXECUTION_MAX_STEP_CONCURRENT_REQUESTS = int(
os.getenv("WORKFLOWS_REMOTE_EXECUTION_MAX_STEP_CONCURRENT_REQUESTS", "8")
)
ALLOW_CUSTOM_PYTHON_EXECUTION_IN_WORKFLOWS = str2bool(
os.getenv("ALLOW_CUSTOM_PYTHON_EXECUTION_IN_WORKFLOWS", True)
)

MODEL_VALIDATION_DISABLED = str2bool(os.getenv("MODEL_VALIDATION_DISABLED", "False"))

Expand Down
1 change: 1 addition & 0 deletions inference/core/interfaces/camera/entities.py
Original file line number Diff line number Diff line change
Expand Up @@ -59,6 +59,7 @@ class VideoFrame:
image: np.ndarray
frame_id: FrameID
frame_timestamp: FrameTimestamp
fps: float = 0
source_id: Optional[int] = None


Expand Down
3 changes: 3 additions & 0 deletions inference/core/interfaces/camera/video_source.py
Original file line number Diff line number Diff line change
Expand Up @@ -888,6 +888,7 @@ def _consume_stream_frame(
buffer=buffer,
decoding_pace_monitor=self._decoding_pace_monitor,
source_id=source_id,
fps=declared_source_fps,
)
if self._buffer_filling_strategy in DROP_OLDEST_STRATEGIES:
return self._process_stream_frame_dropping_oldest(
Expand Down Expand Up @@ -1082,6 +1083,7 @@ def decode_video_frame_to_buffer(
buffer: Queue,
decoding_pace_monitor: sv.FPSMonitor,
source_id: Optional[int],
fps: float = 0,
) -> bool:
success, image = video.retrieve()
if not success:
Expand All @@ -1091,6 +1093,7 @@ def decode_video_frame_to_buffer(
image=image,
frame_id=frame_id,
frame_timestamp=frame_timestamp,
fps=fps,
source_id=source_id,
)
buffer.put(video_frame)
Expand Down
Empty file.
78 changes: 78 additions & 0 deletions inference/core/interfaces/http/handlers/workflows.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,78 @@
# TODO - for everyone: start migrating other handlers to bring relief to http_api.py
from typing import List, Optional

from inference.core.entities.responses.workflows import (
ExternalBlockPropertyPrimitiveDefinition,
ExternalWorkflowsBlockSelectorDefinition,
UniversalQueryLanguageDescription,
WorkflowsBlocksDescription,
)
from inference.core.workflows.core_steps.common.query_language.introspection.core import (
prepare_operations_descriptions,
prepare_operators_descriptions,
)
from inference.core.workflows.execution_engine.dynamic_blocks.block_assembler import (
compile_dynamic_blocks,
)
from inference.core.workflows.execution_engine.dynamic_blocks.entities import (
DynamicBlockDefinition,
)
from inference.core.workflows.execution_engine.introspection.blocks_loader import (
describe_available_blocks,
)
from inference.core.workflows.execution_engine.introspection.connections_discovery import (
discover_blocks_connections,
)


def handle_describe_workflows_blocks_request(
    dynamic_blocks_definitions: Optional[List[DynamicBlockDefinition]] = None,
) -> WorkflowsBlocksDescription:
    """Assemble the full description of available workflow blocks.

    Compiles any caller-provided dynamic block definitions, merges them with the
    statically registered blocks, discovers how block kinds connect to each
    other, and packages everything — together with the Universal Query Language
    description and the dynamic-block JSON schema — into a single response
    entity.

    Args:
        dynamic_blocks_definitions: Optional list of dynamic blocks to compile
            and include; ``None`` is treated the same as an empty list.

    Returns:
        A fully populated ``WorkflowsBlocksDescription``.
    """
    definitions = (
        [] if dynamic_blocks_definitions is None else dynamic_blocks_definitions
    )
    compiled_blocks = compile_dynamic_blocks(
        dynamic_blocks_definitions=definitions,
    )
    description = describe_available_blocks(dynamic_blocks=compiled_blocks)
    connections = discover_blocks_connections(
        blocks_description=description,
    )
    # Re-shape internal connection records into the external response entities.
    kinds_connections = {}
    for kind_name, kind_links in connections.kinds_connections.items():
        kinds_connections[kind_name] = [
            ExternalWorkflowsBlockSelectorDefinition(
                manifest_type_identifier=link.manifest_type_identifier,
                property_name=link.property_name,
                property_description=link.property_description,
                compatible_element=link.compatible_element,
                is_list_element=link.is_list_element,
                is_dict_element=link.is_dict_element,
            )
            for link in kind_links
        ]
    primitives_connections = []
    for primitive in connections.primitives_connections:
        primitives_connections.append(
            ExternalBlockPropertyPrimitiveDefinition(
                manifest_type_identifier=primitive.manifest_type_identifier,
                property_name=primitive.property_name,
                property_description=primitive.property_description,
                type_annotation=primitive.type_annotation,
            )
        )
    uql_description = UniversalQueryLanguageDescription.from_internal_entities(
        operations_descriptions=prepare_operations_descriptions(),
        operators_descriptions=prepare_operators_descriptions(),
    )
    return WorkflowsBlocksDescription(
        blocks=description.blocks,
        declared_kinds=description.declared_kinds,
        kinds_connections=kinds_connections,
        primitives_connections=primitives_connections,
        universal_query_language_description=uql_description,
        dynamic_block_definition_schema=DynamicBlockDefinition.schema(),
    )
Loading

0 comments on commit 376437f

Please sign in to comment.