Commit
Merge branch 'main' into feat/prompt-key-required-field
Signed-off-by: vishnuszipstack <[email protected]>
vishnuszipstack authored Dec 20, 2024
2 parents b4a7746 + ee41dff commit 9000cd0
Showing 240 changed files with 1,465 additions and 14,588 deletions.
12 changes: 12 additions & 0 deletions backend/account_v2/authentication_helper.py
@@ -109,13 +109,25 @@ def remove_user_from_organization_by_user_id(
Parameters:
user_id (str): The user_id of the user to remove.
"""

organization_user = OrganizationMemberService.get_user_by_user_id(user_id)
if not organization_user:
logger.warning(
f"User removal skipped: User '{user_id}' not found in "
f"organization '{organization_id}'."
)
return

# Remove the user from the organization
OrganizationMemberService.remove_user_by_user_id(user_id)

# Clear the user's M2M relations as part of removal
user = User.objects.get(user_id=user_id)
user.prompt_registries.clear()
user.shared_custom_tools.clear()
user.shared_adapters_instance.clear()

# Remove the user's membership entry from the organization cache
OrganizationMemberService.remove_user_membership_in_organization_cache(
user_id=user_id, organization_id=organization_id
)
logger.info(f"User '{user_id}' removed from organization '{organization_id}'")
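A minimal usage sketch of the hardened removal flow; the AuthenticationHelper name, import path, and call style are assumptions for illustration and are not confirmed by this diff:

# Hedged sketch: module path, class name, and call style are assumed, not taken from this commit.
from account_v2.authentication_helper import AuthenticationHelper

# After this change a missing member is logged and skipped instead of raising,
# so the call is safe to repeat for an already-removed user.
AuthenticationHelper().remove_user_from_organization_by_user_id(
    user_id="user-123", organization_id="org-456"  # placeholder identifiers
)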
24 changes: 16 additions & 8 deletions backend/api_v2/api_deployment_views.py
@@ -11,6 +11,7 @@
APIDeploymentListSerializer,
APIDeploymentSerializer,
DeploymentResponseSerializer,
ExecutionQuerySerializer,
ExecutionRequestSerializer,
)
from django.db.models import QuerySet
@@ -51,6 +52,7 @@ def post(
serializer.is_valid(raise_exception=True)
timeout = serializer.validated_data.get(ApiExecution.TIMEOUT_FORM_DATA)
include_metadata = serializer.validated_data.get(ApiExecution.INCLUDE_METADATA)
include_metrics = serializer.validated_data.get(ApiExecution.INCLUDE_METRICS)
use_file_history = serializer.validated_data.get(ApiExecution.USE_FILE_HISTORY)
if not file_objs or len(file_objs) == 0:
raise InvalidAPIRequest("File shouldn't be empty")
@@ -60,6 +62,7 @@
file_objs=file_objs,
timeout=timeout,
include_metadata=include_metadata,
include_metrics=include_metrics,
use_file_history=use_file_history,
)
if "error" in response and response["error"]:
@@ -73,21 +76,26 @@ def post(
def get(
self, request: Request, org_name: str, api_name: str, api: APIDeployment
) -> Response:
execution_id = request.query_params.get("execution_id")
include_metadata = (
request.query_params.get(ApiExecution.INCLUDE_METADATA, "false").lower()
== "true"
)
if not execution_id:
raise InvalidAPIRequest("execution_id shouldn't be empty")
serializer = ExecutionQuerySerializer(data=request.query_params)
serializer.is_valid(raise_exception=True)

execution_id = serializer.validated_data.get(ApiExecution.EXECUTION_ID)
include_metadata = serializer.validated_data.get(ApiExecution.INCLUDE_METADATA)
include_metrics = serializer.validated_data.get(ApiExecution.INCLUDE_METRICS)

# Fetch execution status
response: ExecutionResponse = DeploymentHelper.get_execution_status(
execution_id=execution_id
execution_id
)

# Determine response status
response_status = status.HTTP_422_UNPROCESSABLE_ENTITY
if response.execution_status == CeleryTaskState.COMPLETED.value:
response_status = status.HTTP_200_OK
if not include_metadata:
response.remove_result_metadata_keys()
if not include_metrics:
response.remove_result_metrics()
if response.result_acknowledged:
response_status = status.HTTP_406_NOT_ACCEPTABLE
response.result = "Result already acknowledged"
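For context, a hedged sketch of how a client might poll the status endpoint with the new include_metrics query parameter; the base URL, org/API names, and key are placeholders, not values from this commit:

import requests

params = {
    "execution_id": "00000000-0000-0000-0000-000000000000",  # placeholder; must be an existing execution UUID
    "include_metadata": "false",
    "include_metrics": "true",  # flag introduced in this commit
}
resp = requests.get(
    "https://unstract.example.com/deployment/api/my_org/my_api/",  # placeholder deployment URL
    headers={"Authorization": "Bearer <api-key>"},                 # placeholder API key
    params=params,
    timeout=30,
)
print(resp.status_code, resp.json())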
2 changes: 2 additions & 0 deletions backend/api_v2/constants.py
@@ -4,4 +4,6 @@ class ApiExecution:
FILES_FORM_DATA: str = "files"
TIMEOUT_FORM_DATA: str = "timeout"
INCLUDE_METADATA: str = "include_metadata"
INCLUDE_METRICS: str = "include_metrics"
USE_FILE_HISTORY: str = "use_file_history" # Undocumented parameter
EXECUTION_ID: str = "execution_id"
3 changes: 3 additions & 0 deletions backend/api_v2/deployment_helper.py
@@ -136,6 +136,7 @@ def execute_workflow(
file_objs: list[UploadedFile],
timeout: int,
include_metadata: bool = False,
include_metrics: bool = False,
use_file_history: bool = False,
) -> ReturnDict:
"""Execute workflow by api.
@@ -180,6 +181,8 @@
)
if not include_metadata:
result.remove_result_metadata_keys()
if not include_metrics:
result.remove_result_metrics()
except Exception as error:
DestinationConnector.delete_api_storage_dir(
workflow_id=workflow_id, execution_id=execution_id
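A similarly hedged sketch of the POST side that exercises the new include_metrics form field (field names match ApiExecution in constants.py; the URL and key are placeholders):

import requests

with open("sample.pdf", "rb") as fh:  # placeholder input file
    resp = requests.post(
        "https://unstract.example.com/deployment/api/my_org/my_api/",  # placeholder deployment URL
        headers={"Authorization": "Bearer <api-key>"},                 # placeholder API key
        files={"files": fh},
        data={"timeout": 300, "include_metadata": "false", "include_metrics": "true"},
    )
print(resp.json())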
2 changes: 2 additions & 0 deletions backend/api_v2/postman_collection/dto.py
@@ -118,6 +118,7 @@ def get_form_data_items(self) -> list[FormDataItem]:
value=ApiExecution.MAXIMUM_TIMEOUT_IN_SEC,
),
FormDataItem(key=ApiExecution.INCLUDE_METADATA, type="text", value="False"),
FormDataItem(key=ApiExecution.INCLUDE_METRICS, type="text", value="False"),
]

def get_api_key(self) -> str:
@@ -131,6 +132,7 @@ def _get_status_api_request(self) -> RequestItem:
status_query_param = {
"execution_id": CollectionKey.STATUS_EXEC_ID_DEFAULT,
ApiExecution.INCLUDE_METADATA: "False",
ApiExecution.INCLUDE_METRICS: "False",
}
status_query_str = urlencode(status_query_param)
abs_api_endpoint = urljoin(settings.WEB_APP_ORIGIN_URL, self.api_endpoint)
32 changes: 32 additions & 0 deletions backend/api_v2/serializers.py
@@ -1,3 +1,4 @@
import uuid
from collections import OrderedDict
from typing import Any, Union

@@ -15,6 +16,8 @@
ValidationError,
)
from utils.serializer.integrity_error_mixin import IntegrityErrorMixin
from workflow_manager.workflow_v2.exceptions import ExecutionDoesNotExistError
from workflow_manager.workflow_v2.models.execution import WorkflowExecution

from backend.serializers import AuditSerializer

@@ -103,6 +106,7 @@ class ExecutionRequestSerializer(Serializer):
timeout (int): Timeout for the API deployment, maximum value can be 300s.
If -1 it corresponds to async execution. Defaults to -1
include_metadata (bool): Flag to include metadata in API response
include_metrics (bool): Flag to include metrics in API response
use_file_history (bool): Flag to use FileHistory to save and retrieve
responses quickly. This is undocumented to the user and can be
helpful for demos.
@@ -112,9 +116,37 @@
min_value=-1, max_value=ApiExecution.MAXIMUM_TIMEOUT_IN_SEC, default=-1
)
include_metadata = BooleanField(default=False)
include_metrics = BooleanField(default=False)
use_file_history = BooleanField(default=False)


class ExecutionQuerySerializer(Serializer):
execution_id = CharField(required=True)
include_metadata = BooleanField(default=False)
include_metrics = BooleanField(default=False)

def validate_execution_id(self, value):
"""Trim spaces, validate UUID format, and check if execution_id exists."""
value = value.strip()

# Validate UUID format
try:
uuid_obj = uuid.UUID(value)
except ValueError:
raise ValidationError(
f"Invalid execution_id '{value}'. Must be a valid UUID."
)

# Check if UUID exists in the database
exists = WorkflowExecution.objects.filter(id=uuid_obj).exists()
if not exists:
raise ExecutionDoesNotExistError(
f"Execution with ID '{value}' does not exist."
)

return str(uuid_obj)


class APIDeploymentListSerializer(ModelSerializer):
workflow_name = CharField(source="workflow.workflow_name", read_only=True)

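A minimal sketch of the new ExecutionQuerySerializer in action; the import path assumes the backend package layout, and the check must run inside the Django project so the WorkflowExecution lookup works:

from api_v2.serializers import ExecutionQuerySerializer  # import path assumed

serializer = ExecutionQuerySerializer(
    data={
        "execution_id": "  1b0c9a7e-5d7f-4f3a-9c2e-8a6d4b2f1e00  ",  # placeholder; surrounding spaces are stripped
        "include_metrics": "true",
    }
)
# Raises ValidationError for a malformed UUID, or ExecutionDoesNotExistError
# when no WorkflowExecution row matches the (placeholder) UUID.
serializer.is_valid(raise_exception=True)
print(serializer.validated_data)  # validated execution_id plus both boolean flags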