diff --git a/backend/account_v2/authentication_helper.py b/backend/account_v2/authentication_helper.py index e303445fc..d99a424ab 100644 --- a/backend/account_v2/authentication_helper.py +++ b/backend/account_v2/authentication_helper.py @@ -109,13 +109,25 @@ def remove_user_from_organization_by_user_id( Parameters: user_id (str): The user_id of the users to remove. """ + + organization_user = OrganizationMemberService.get_user_by_user_id(user_id) + if not organization_user: + logger.warning( + f"User removal skipped: User '{user_id}' not found in " + f"organization '{organization_id}'." + ) + return + # removing user from organization OrganizationMemberService.remove_user_by_user_id(user_id) + # removing the user's m2m relations while removing the user User.objects.get(user_id=user_id).prompt_registries.clear() User.objects.get(user_id=user_id).shared_custom_tools.clear() User.objects.get(user_id=user_id).shared_adapters_instance.clear() + # removing user from organization cache OrganizationMemberService.remove_user_membership_in_organization_cache( user_id=user_id, organization_id=organization_id ) + logger.info(f"User '{user_id}' removed from organization '{organization_id}'") diff --git a/backend/api_v2/api_deployment_views.py b/backend/api_v2/api_deployment_views.py index fddf8d536..c820363cf 100644 --- a/backend/api_v2/api_deployment_views.py +++ b/backend/api_v2/api_deployment_views.py @@ -11,6 +11,7 @@ APIDeploymentListSerializer, APIDeploymentSerializer, DeploymentResponseSerializer, + ExecutionQuerySerializer, ExecutionRequestSerializer, ) from django.db.models import QuerySet @@ -51,6 +52,7 @@ def post( serializer.is_valid(raise_exception=True) timeout = serializer.validated_data.get(ApiExecution.TIMEOUT_FORM_DATA) include_metadata = serializer.validated_data.get(ApiExecution.INCLUDE_METADATA) + include_metrics = serializer.validated_data.get(ApiExecution.INCLUDE_METRICS) use_file_history = serializer.validated_data.get(ApiExecution.USE_FILE_HISTORY) if not file_objs or len(file_objs) == 0: raise InvalidAPIRequest("File shouldn't be empty") @@ -60,6 +62,7 @@ def post( file_objs=file_objs, timeout=timeout, include_metadata=include_metadata, + include_metrics=include_metrics, use_file_history=use_file_history, ) if "error" in response and response["error"]: @@ -73,21 +76,26 @@ def post( def get( self, request: Request, org_name: str, api_name: str, api: APIDeployment ) -> Response: - execution_id = request.query_params.get("execution_id") - include_metadata = ( - request.query_params.get(ApiExecution.INCLUDE_METADATA, "false").lower() - == "true" - ) - if not execution_id: - raise InvalidAPIRequest("execution_id shouldn't be empty") + serializer = ExecutionQuerySerializer(data=request.query_params) + serializer.is_valid(raise_exception=True) + + execution_id = serializer.validated_data.get(ApiExecution.EXECUTION_ID) + include_metadata = serializer.validated_data.get(ApiExecution.INCLUDE_METADATA) + include_metrics = serializer.validated_data.get(ApiExecution.INCLUDE_METRICS) + + # Fetch execution status response: ExecutionResponse = DeploymentHelper.get_execution_status( - execution_id=execution_id + execution_id ) + + # Determine response status response_status = status.HTTP_422_UNPROCESSABLE_ENTITY if response.execution_status == CeleryTaskState.COMPLETED.value: response_status = status.HTTP_200_OK if not include_metadata: response.remove_result_metadata_keys() + if not include_metrics: + response.remove_result_metrics() if response.result_acknowledged: response_status = 
status.HTTP_406_NOT_ACCEPTABLE response.result = "Result already acknowledged" diff --git a/backend/api_v2/constants.py b/backend/api_v2/constants.py index 2de41efb2..f6a980303 100644 --- a/backend/api_v2/constants.py +++ b/backend/api_v2/constants.py @@ -4,4 +4,6 @@ class ApiExecution: FILES_FORM_DATA: str = "files" TIMEOUT_FORM_DATA: str = "timeout" INCLUDE_METADATA: str = "include_metadata" + INCLUDE_METRICS: str = "include_metrics" USE_FILE_HISTORY: str = "use_file_history" # Undocumented parameter + EXECUTION_ID: str = "execution_id" diff --git a/backend/api_v2/deployment_helper.py b/backend/api_v2/deployment_helper.py index 3a17d8350..acdb6f673 100644 --- a/backend/api_v2/deployment_helper.py +++ b/backend/api_v2/deployment_helper.py @@ -136,6 +136,7 @@ def execute_workflow( file_objs: list[UploadedFile], timeout: int, include_metadata: bool = False, + include_metrics: bool = False, use_file_history: bool = False, ) -> ReturnDict: """Execute workflow by api. @@ -180,6 +181,8 @@ def execute_workflow( ) if not include_metadata: result.remove_result_metadata_keys() + if not include_metrics: + result.remove_result_metrics() except Exception as error: DestinationConnector.delete_api_storage_dir( workflow_id=workflow_id, execution_id=execution_id diff --git a/backend/api_v2/postman_collection/dto.py b/backend/api_v2/postman_collection/dto.py index a9ffd01f3..85d72b6f8 100644 --- a/backend/api_v2/postman_collection/dto.py +++ b/backend/api_v2/postman_collection/dto.py @@ -118,6 +118,7 @@ def get_form_data_items(self) -> list[FormDataItem]: value=ApiExecution.MAXIMUM_TIMEOUT_IN_SEC, ), FormDataItem(key=ApiExecution.INCLUDE_METADATA, type="text", value="False"), + FormDataItem(key=ApiExecution.INCLUDE_METRICS, type="text", value="False"), ] def get_api_key(self) -> str: @@ -131,6 +132,7 @@ def _get_status_api_request(self) -> RequestItem: status_query_param = { "execution_id": CollectionKey.STATUS_EXEC_ID_DEFAULT, ApiExecution.INCLUDE_METADATA: "False", + ApiExecution.INCLUDE_METRICS: "False", } status_query_str = urlencode(status_query_param) abs_api_endpoint = urljoin(settings.WEB_APP_ORIGIN_URL, self.api_endpoint) diff --git a/backend/api_v2/serializers.py b/backend/api_v2/serializers.py index 655478e4b..9fffc748a 100644 --- a/backend/api_v2/serializers.py +++ b/backend/api_v2/serializers.py @@ -1,3 +1,4 @@ +import uuid from collections import OrderedDict from typing import Any, Union @@ -15,6 +16,8 @@ ValidationError, ) from utils.serializer.integrity_error_mixin import IntegrityErrorMixin +from workflow_manager.workflow_v2.exceptions import ExecutionDoesNotExistError +from workflow_manager.workflow_v2.models.execution import WorkflowExecution from backend.serializers import AuditSerializer @@ -103,6 +106,7 @@ class ExecutionRequestSerializer(Serializer): timeout (int): Timeout for the API deployment, maximum value can be 300s. If -1 it corresponds to async execution. Defaults to -1 include_metadata (bool): Flag to include metadata in API response + include_metrics (bool): Flag to include metrics in API response use_file_history (bool): Flag to use FileHistory to save and retrieve responses quickly. This is undocumented to the user and can be helpful for demos. 
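# --- Illustrative usage (reviewer note, not part of the diff) --------------
# A minimal client-side sketch of how the new `include_metrics` flag might be
# exercised against an API deployment: as POST form data on execution and as a
# GET query parameter when polling status. The base URL, org/API names, bearer
# token, file name, and execution_id below are placeholders, not values taken
# from this change.
import requests

API_URL = "https://unstract.example.com/deployment/api/my_org/my_api/"
HEADERS = {"Authorization": "Bearer <api-key>"}

# Execute the deployed workflow; metrics are stripped from the result unless
# include_metrics is sent as true. timeout=-1 requests async execution.
with open("sample.pdf", "rb") as f:
    exec_resp = requests.post(
        API_URL,
        headers=HEADERS,
        files={"files": f},
        data={"timeout": -1, "include_metadata": False, "include_metrics": True},
    )
print(exec_resp.status_code, exec_resp.json())

# Poll execution status; include_metadata / include_metrics default to False,
# and execution_id must be a valid, existing UUID per ExecutionQuerySerializer.
status_resp = requests.get(
    API_URL,
    headers=HEADERS,
    params={
        "execution_id": "<execution-id-from-the-post-response>",
        "include_metrics": True,
    },
)
print(status_resp.status_code, status_resp.json())
# ----------------------------------------------------------------------------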
@@ -112,9 +116,37 @@ class ExecutionRequestSerializer(Serializer): min_value=-1, max_value=ApiExecution.MAXIMUM_TIMEOUT_IN_SEC, default=-1 ) include_metadata = BooleanField(default=False) + include_metrics = BooleanField(default=False) use_file_history = BooleanField(default=False) +class ExecutionQuerySerializer(Serializer): + execution_id = CharField(required=True) + include_metadata = BooleanField(default=False) + include_metrics = BooleanField(default=False) + + def validate_execution_id(self, value): + """Trim spaces, validate UUID format, and check if execution_id exists.""" + value = value.strip() + + # Validate UUID format + try: + uuid_obj = uuid.UUID(value) + except ValueError: + raise ValidationError( + f"Invalid execution_id '{value}'. Must be a valid UUID." + ) + + # Check if UUID exists in the database + exists = WorkflowExecution.objects.filter(id=uuid_obj).exists() + if not exists: + raise ExecutionDoesNotExistError( + f"Execution with ID '{value}' does not exist." + ) + + return str(uuid_obj) + + class APIDeploymentListSerializer(ModelSerializer): workflow_name = CharField(source="workflow.workflow_name", read_only=True) diff --git a/backend/pdm.lock b/backend/pdm.lock index 4240e9f21..13281db97 100644 --- a/backend/pdm.lock +++ b/backend/pdm.lock @@ -5,7 +5,7 @@ groups = ["default", "deploy", "dev", "test"] strategy = ["cross_platform", "inherit_metadata"] lock_version = "4.4.2" -content_hash = "sha256:450e2c25503dcaddb33525ed3e4f82ed938e1c3f5a94bee18a58150c0d8b8446" +content_hash = "sha256:9f9359ffbaa543b5bc5b9863a4d12435a5f44185df810828d6c8d9ce22bede93" [[package]] name = "adlfs" @@ -151,16 +151,16 @@ files = [ [[package]] name = "aiosignal" -version = "1.3.1" -requires_python = ">=3.7" +version = "1.3.2" +requires_python = ">=3.9" summary = "aiosignal: a list of registered asynchronous callbacks" groups = ["default", "dev"] dependencies = [ "frozenlist>=1.1.0", ] files = [ - {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, - {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, + {file = "aiosignal-1.3.2-py2.py3-none-any.whl", hash = "sha256:45cde58e409a301715980c2b01d0c28bdde3770d8290b5eb2173759d9acb31a5"}, + {file = "aiosignal-1.3.2.tar.gz", hash = "sha256:a8c255c66fafb1e499c9351d0bf32ff2d8a0321595ebac3b93713656d2436f54"}, ] [[package]] @@ -311,13 +311,13 @@ files = [ [[package]] name = "attrs" -version = "24.2.0" -requires_python = ">=3.7" +version = "24.3.0" +requires_python = ">=3.8" summary = "Classes Without Boilerplate" groups = ["default", "dev"] files = [ - {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, - {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, + {file = "attrs-24.3.0-py3-none-any.whl", hash = "sha256:ac96cd038792094f438ad1f6ff80837353805ac950cd2aa0e0625ef19850c308"}, + {file = "attrs-24.3.0.tar.gz", hash = "sha256:8f5c07333d543103541ba7be0e2ce16eeee8130cb0b3f9238ab904ce1e85baff"}, ] [[package]] @@ -624,13 +624,13 @@ files = [ [[package]] name = "certifi" -version = "2024.8.30" +version = "2024.12.14" requires_python = ">=3.6" summary = "Python package for providing Mozilla's CA Bundle." 
groups = ["default", "dev"] files = [ - {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, - {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, + {file = "certifi-2024.12.14-py3-none-any.whl", hash = "sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56"}, + {file = "certifi-2024.12.14.tar.gz", hash = "sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db"}, ] [[package]] @@ -817,7 +817,7 @@ files = [ [[package]] name = "croniter" -version = "5.0.1" +version = "6.0.0" requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.6" summary = "croniter provides iteration for datetime object with cron like format" groups = ["default"] @@ -826,8 +826,8 @@ dependencies = [ "pytz>2021.1", ] files = [ - {file = "croniter-5.0.1-py2.py3-none-any.whl", hash = "sha256:eb28439742291f6c10b181df1a5ecf421208b1fc62ef44501daec1780a0b09e9"}, - {file = "croniter-5.0.1.tar.gz", hash = "sha256:7d9b1ef25b10eece48fdf29d8ac52f9b6252abff983ac614ade4f3276294019e"}, + {file = "croniter-6.0.0-py2.py3-none-any.whl", hash = "sha256:2f878c3856f17896979b2a4379ba1f09c83e374931ea15cc835c5dd2eee9b368"}, + {file = "croniter-6.0.0.tar.gz", hash = "sha256:37c504b313956114a983ece2c2b07790b1f1094fe9d81cc94739214748255577"}, ] [[package]] @@ -1448,7 +1448,7 @@ files = [ [[package]] name = "google-cloud-aiplatform" -version = "1.74.0" +version = "1.75.0" requires_python = ">=3.8" summary = "Vertex AI API client library" groups = ["default", "dev"] @@ -1466,8 +1466,8 @@ dependencies = [ "shapely<3.0.0dev", ] files = [ - {file = "google_cloud_aiplatform-1.74.0-py2.py3-none-any.whl", hash = "sha256:7f37a835e543a4cb4b62505928b983e307c5fee6d949f831cd3804f03c753d87"}, - {file = "google_cloud_aiplatform-1.74.0.tar.gz", hash = "sha256:2202e4e0cbbd2db02835737a1ae9a51ad7bf75c8ed130a3fdbcfced33525e3f0"}, + {file = "google_cloud_aiplatform-1.75.0-py2.py3-none-any.whl", hash = "sha256:eb5d79b5f7210d79a22b53c93a69b5bae5680dfc829387ea020765b97786b3d0"}, + {file = "google_cloud_aiplatform-1.75.0.tar.gz", hash = "sha256:eb8404abf1134b3b368535fe429c4eec2fd12d444c2e9ffbc329ddcbc72b36c9"}, ] [[package]] @@ -1510,7 +1510,7 @@ files = [ [[package]] name = "google-cloud-resource-manager" -version = "1.13.1" +version = "1.14.0" requires_python = ">=3.7" summary = "Google Cloud Resource Manager API client library" groups = ["default", "dev"] @@ -1522,8 +1522,8 @@ dependencies = [ "protobuf!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<6.0.0dev,>=3.20.2", ] files = [ - {file = "google_cloud_resource_manager-1.13.1-py2.py3-none-any.whl", hash = "sha256:abdc7d443ab6c0763b8ed49ab59203e223f14c683df69e3748d5eb2237475f5f"}, - {file = "google_cloud_resource_manager-1.13.1.tar.gz", hash = "sha256:bee9f2fb1d856731182b7cc05980d216aae848947ccdadf2848a2c64ccd6bbea"}, + {file = "google_cloud_resource_manager-1.14.0-py2.py3-none-any.whl", hash = "sha256:4860c3ea9ace760b317ea90d4e27f1b32e54ededdcc340a7cb70c8ef238d8f7c"}, + {file = "google_cloud_resource_manager-1.14.0.tar.gz", hash = "sha256:daa70a3a4704759d31f812ed221e3b6f7b660af30c7862e4a0060ea91291db30"}, ] [[package]] @@ -1936,7 +1936,7 @@ files = [ [[package]] name = "huggingface-hub" -version = "0.26.5" +version = "0.27.0" requires_python = ">=3.8.0" summary = "Client library to download and publish models, datasets and other repos on the huggingface.co hub" groups = ["default", "dev"] @@ -1950,8 +1950,8 @@ dependencies = [ 
"typing-extensions>=3.7.4.3", ] files = [ - {file = "huggingface_hub-0.26.5-py3-none-any.whl", hash = "sha256:fb7386090bbe892072e64b85f7c4479fd2d65eea5f2543327c970d5169e83924"}, - {file = "huggingface_hub-0.26.5.tar.gz", hash = "sha256:1008bd18f60bfb65e8dbc0a97249beeeaa8c99d3c2fa649354df9fa5a13ed83b"}, + {file = "huggingface_hub-0.27.0-py3-none-any.whl", hash = "sha256:8f2e834517f1f1ddf1ecc716f91b120d7333011b7485f665a9a412eacb1a2a81"}, + {file = "huggingface_hub-0.27.0.tar.gz", hash = "sha256:902cce1a1be5739f5589e560198a65a8edcfd3b830b1666f36e4b961f0454fac"}, ] [[package]] @@ -2958,7 +2958,7 @@ files = [ [[package]] name = "openai" -version = "1.57.2" +version = "1.58.1" requires_python = ">=3.8" summary = "The official Python library for the openai API" groups = ["default", "dev"] @@ -2973,8 +2973,8 @@ dependencies = [ "typing-extensions<5,>=4.11", ] files = [ - {file = "openai-1.57.2-py3-none-any.whl", hash = "sha256:f7326283c156fdee875746e7e54d36959fb198eadc683952ee05e3302fbd638d"}, - {file = "openai-1.57.2.tar.gz", hash = "sha256:5f49fd0f38e9f2131cda7deb45dafdd1aee4f52a637e190ce0ecf40147ce8cee"}, + {file = "openai-1.58.1-py3-none-any.whl", hash = "sha256:e2910b1170a6b7f88ef491ac3a42c387f08bd3db533411f7ee391d166571d63c"}, + {file = "openai-1.58.1.tar.gz", hash = "sha256:f5a035fd01e141fc743f4b0e02c41ca49be8fab0866d3b67f5f29b4f4d3c0973"}, ] [[package]] @@ -3871,7 +3871,7 @@ files = [ [[package]] name = "python-engineio" -version = "4.10.1" +version = "4.11.1" requires_python = ">=3.6" summary = "Engine.IO server and client for Python" groups = ["default"] @@ -3879,8 +3879,8 @@ dependencies = [ "simple-websocket>=0.10.0", ] files = [ - {file = "python_engineio-4.10.1-py3-none-any.whl", hash = "sha256:445a94004ec8034960ab99e7ce4209ec619c6e6b6a12aedcb05abeab924025c0"}, - {file = "python_engineio-4.10.1.tar.gz", hash = "sha256:166cea8dd7429638c5c4e3a4895beae95196e860bc6f29ed0b9fe753d1ef2072"}, + {file = "python_engineio-4.11.1-py3-none-any.whl", hash = "sha256:8ff9ec366724cd9b0fd92acf7a61b15ae923d28f37f842304adbd7f71b3d6672"}, + {file = "python_engineio-4.11.1.tar.gz", hash = "sha256:ff8a23a843c223ec793835f1bcf584ff89ce0f1c2bcce37dffa6436c6fa74133"}, ] [[package]] @@ -4008,16 +4008,16 @@ files = [ [[package]] name = "redis" -version = "5.0.8" -requires_python = ">=3.7" +version = "5.2.1" +requires_python = ">=3.8" summary = "Python client for Redis database and key-value store" groups = ["default", "dev"] dependencies = [ "async-timeout>=4.0.3; python_full_version < \"3.11.3\"", ] files = [ - {file = "redis-5.0.8-py3-none-any.whl", hash = "sha256:56134ee08ea909106090934adc36f65c9bcbbaecea5b21ba704ba6fb561f8eb4"}, - {file = "redis-5.0.8.tar.gz", hash = "sha256:0c5b10d387568dfe0698c6fad6615750c24170e548ca2deac10c649d463e9870"}, + {file = "redis-5.2.1-py3-none-any.whl", hash = "sha256:ee7e1056b9aea0f04c6c2ed59452947f34c4940ee025f5dd83e6a6418b6989e4"}, + {file = "redis-5.2.1.tar.gz", hash = "sha256:16f2e22dff21d5125e8481515e386711a34cbec50f0e44413dd7d9c060a54e0f"}, ] [[package]] @@ -5024,7 +5024,7 @@ dependencies = [ "kombu==5.3.7", "llama-index-llms-azure-openai==0.1.10", "llama-index==0.10.58", - "redis~=5.0.1", + "redis~=5.2.1", "requests==2.31.0", ] @@ -5055,7 +5055,7 @@ dependencies = [ [[package]] name = "unstract-sdk" -version = "0.54.0rc6" +version = "0.54.0rc8" requires_python = "<3.11.1,>=3.9" summary = "A framework for writing Unstract Tools/Apps" groups = ["default", "dev"] @@ -5089,13 +5089,14 @@ dependencies = [ "pdfplumber>=0.11.2", "python-dotenv==1.0.0", "python-magic~=0.4.27", + 
"redis>=5.2.1", "singleton-decorator~=1.0.0", "tiktoken~=0.4.0", "transformers==4.37.0", ] files = [ - {file = "unstract_sdk-0.54.0rc6-py3-none-any.whl", hash = "sha256:bb0bbee12a7fb47d53adc14a2d9ddbfcebcf6abed8b0b6deb927f64921c34630"}, - {file = "unstract_sdk-0.54.0rc6.tar.gz", hash = "sha256:410d01a07402fe8b80a1d253daded10512f36c2801e4fc94258b4d2fe9d785fb"}, + {file = "unstract_sdk-0.54.0rc8-py3-none-any.whl", hash = "sha256:c71a4a20c2ae9aac6830297251f74b0e798392ff19fee8481befbf7f4e3b8ba2"}, + {file = "unstract_sdk-0.54.0rc8.tar.gz", hash = "sha256:08a1ec113f96b93c39e0d1b9df39db8eb7957eee1365f5fb5b1432b1b4353a25"}, ] [[package]] @@ -5111,7 +5112,7 @@ dependencies = [ "docker~=6.1.3", "jsonschema~=4.18.2", "unstract-flags", - "unstract-sdk~=0.54.0rc6", + "unstract-sdk~=0.54.0rc8", "unstract-tool-sandbox", ] diff --git a/backend/prompt_studio/prompt_profile_manager/__init__.py b/backend/prompt_studio/prompt_profile_manager/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/backend/prompt_studio/prompt_profile_manager/admin.py b/backend/prompt_studio/prompt_profile_manager/admin.py deleted file mode 100644 index 6878f3f43..000000000 --- a/backend/prompt_studio/prompt_profile_manager/admin.py +++ /dev/null @@ -1,5 +0,0 @@ -from django.contrib import admin - -from .models import ProfileManager - -admin.site.register(ProfileManager) diff --git a/backend/prompt_studio/prompt_profile_manager/apps.py b/backend/prompt_studio/prompt_profile_manager/apps.py deleted file mode 100644 index 635132b93..000000000 --- a/backend/prompt_studio/prompt_profile_manager/apps.py +++ /dev/null @@ -1,5 +0,0 @@ -from django.apps import AppConfig - - -class ProfileManager(AppConfig): - name = "prompt_studio.prompt_profile_manager" diff --git a/backend/prompt_studio/prompt_profile_manager/constants.py b/backend/prompt_studio/prompt_profile_manager/constants.py deleted file mode 100644 index 6540b58ee..000000000 --- a/backend/prompt_studio/prompt_profile_manager/constants.py +++ /dev/null @@ -1,18 +0,0 @@ -class ProfileManagerKeys: - CREATED_BY = "created_by" - TOOL_ID = "tool_id" - PROMPTS = "prompts" - ADAPTER_NAME = "adapter_name" - LLM = "llm" - VECTOR_STORE = "vector_store" - EMBEDDING_MODEL = "embedding_model" - X2TEXT = "x2text" - PROMPT_STUDIO_TOOL = "prompt_studio_tool" - MAX_PROFILE_COUNT = 4 - - -class ProfileManagerErrors: - SERIALIZATION_FAILED = "Data Serialization Failed." - PROFILE_NAME_EXISTS = "A profile with this name already exists." - DUPLICATE_API = "It appears that a duplicate call may have been made." - PLATFORM_ERROR = "Seems an error occured in Platform Service." diff --git a/backend/prompt_studio/prompt_profile_manager/exceptions.py b/backend/prompt_studio/prompt_profile_manager/exceptions.py deleted file mode 100644 index 023f6ad1c..000000000 --- a/backend/prompt_studio/prompt_profile_manager/exceptions.py +++ /dev/null @@ -1,6 +0,0 @@ -from rest_framework.exceptions import APIException - - -class PlatformServiceError(APIException): - status_code = 400 - default_detail = "Seems an error occured in Platform Service." 
diff --git a/backend/prompt_studio/prompt_profile_manager/migrations/0001_initial.py b/backend/prompt_studio/prompt_profile_manager/migrations/0001_initial.py deleted file mode 100644 index 72e40bcc6..000000000 --- a/backend/prompt_studio/prompt_profile_manager/migrations/0001_initial.py +++ /dev/null @@ -1,109 +0,0 @@ -# Generated by Django 4.2.1 on 2024-01-20 08:04 - -import uuid - -import django.db.models.deletion -from django.conf import settings -from django.db import migrations, models - - -class Migration(migrations.Migration): - initial = True - - dependencies = [ - migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ] - - operations = [ - migrations.CreateModel( - name="ProfileManager", - fields=[ - ("created_at", models.DateTimeField(auto_now_add=True)), - ("updated_at", models.DateTimeField(auto_now=True)), - ( - "profile_id", - models.UUIDField( - default=uuid.uuid4, - editable=False, - primary_key=True, - serialize=False, - ), - ), - ("profile_name", models.TextField(unique=True)), - ( - "vector_store", - models.TextField( - db_comment="Field to store the chosen vector store." - ), - ), - ("embedding_model", models.TextField()), - ( - "llm", - models.TextField( - db_comment="Field to store the LLM chosen by the user" - ), - ), - ("chunk_size", models.IntegerField(blank=True, null=True)), - ("chunk_overlap", models.IntegerField(blank=True, null=True)), - ("reindex", models.BooleanField(default=False)), - ("vector_size", models.IntegerField()), - ( - "pdf_to_text_converters", - models.TextField(blank=True, null=True), - ), - ( - "retrival_strategy", - models.TextField( - blank=True, - choices=[ - ("simple", "Simple retrieval"), - ("subquestion", "Subquestion from prompt"), - ("vector+keyword", "Uses vector for retrieval"), - ], - db_comment="Field to store the retrieval strategy for prompts", - ), - ), - ( - "similarity_top_k", - models.IntegerField( - blank=True, - db_comment="Field to store matching count", - null=True, - ), - ), - ( - "section", - models.TextField( - blank=True, - db_comment="Field to store limit to section", - null=True, - ), - ), - ( - "created_by", - models.ForeignKey( - blank=True, - editable=False, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - related_name="profile_created_by", - to=settings.AUTH_USER_MODEL, - ), - ), - ( - "modified_by", - models.ForeignKey( - blank=True, - editable=False, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - related_name="profile_modified_by", - to=settings.AUTH_USER_MODEL, - ), - ), - ], - options={ - "abstract": False, - }, - ), - ] diff --git a/backend/prompt_studio/prompt_profile_manager/migrations/0002_remove_profilemanager_vector_size_and_more.py b/backend/prompt_studio/prompt_profile_manager/migrations/0002_remove_profilemanager_vector_size_and_more.py deleted file mode 100644 index 2fbed11eb..000000000 --- a/backend/prompt_studio/prompt_profile_manager/migrations/0002_remove_profilemanager_vector_size_and_more.py +++ /dev/null @@ -1,47 +0,0 @@ -# Generated by Django 4.2.1 on 2024-01-24 14:29 - -import django.db.models.deletion -from django.db import migrations, models - - -class Migration(migrations.Migration): - dependencies = [ - ("adapter_processor", "0002_adapterinstance_unique_adapter"), - ("prompt_profile_manager", "0001_initial"), - ] - - operations = [ - migrations.RemoveField( - model_name="profilemanager", - name="vector_size", - ), - migrations.AlterField( - model_name="profilemanager", - name="embedding_model", - field=models.ForeignKey( - 
on_delete=django.db.models.deletion.PROTECT, - related_name="profile_manager_embedding", - to="adapter_processor.adapterinstance", - ), - ), - migrations.AlterField( - model_name="profilemanager", - name="llm", - field=models.ForeignKey( - db_comment="Field to store the LLM chosen by the user", - on_delete=django.db.models.deletion.PROTECT, - related_name="profile_manager_llm", - to="adapter_processor.adapterinstance", - ), - ), - migrations.AlterField( - model_name="profilemanager", - name="vector_store", - field=models.ForeignKey( - db_comment="Field to store the chosen vector store.", - on_delete=django.db.models.deletion.PROTECT, - related_name="profile_manager_vector", - to="adapter_processor.adapterinstance", - ), - ), - ] diff --git a/backend/prompt_studio/prompt_profile_manager/migrations/0002_rename_updated_at_profilemanager_modified_at.py b/backend/prompt_studio/prompt_profile_manager/migrations/0002_rename_updated_at_profilemanager_modified_at.py deleted file mode 100644 index 6ffd7fc96..000000000 --- a/backend/prompt_studio/prompt_profile_manager/migrations/0002_rename_updated_at_profilemanager_modified_at.py +++ /dev/null @@ -1,17 +0,0 @@ -# Generated by Django 4.2.1 on 2024-01-23 19:02 - -from django.db import migrations - - -class Migration(migrations.Migration): - dependencies = [ - ("prompt_profile_manager", "0001_initial"), - ] - - operations = [ - migrations.RenameField( - model_name="profilemanager", - old_name="updated_at", - new_name="modified_at", - ), - ] diff --git a/backend/prompt_studio/prompt_profile_manager/migrations/0003_merge_20240125_0530.py b/backend/prompt_studio/prompt_profile_manager/migrations/0003_merge_20240125_0530.py deleted file mode 100644 index ed6213d52..000000000 --- a/backend/prompt_studio/prompt_profile_manager/migrations/0003_merge_20240125_0530.py +++ /dev/null @@ -1,18 +0,0 @@ -# Generated by Django 4.2.1 on 2024-01-25 05:30 - -from django.db import migrations - - -class Migration(migrations.Migration): - dependencies = [ - ( - "prompt_profile_manager", - "0002_remove_profilemanager_vector_size_and_more", - ), - ( - "prompt_profile_manager", - "0002_rename_updated_at_profilemanager_modified_at", - ), - ] - - operations = [] diff --git a/backend/prompt_studio/prompt_profile_manager/migrations/0004_rename_retrival_strategy_profilemanager_retrieval_strategy.py b/backend/prompt_studio/prompt_profile_manager/migrations/0004_rename_retrival_strategy_profilemanager_retrieval_strategy.py deleted file mode 100644 index 19ae21115..000000000 --- a/backend/prompt_studio/prompt_profile_manager/migrations/0004_rename_retrival_strategy_profilemanager_retrieval_strategy.py +++ /dev/null @@ -1,17 +0,0 @@ -# Generated by Django 4.2.1 on 2024-02-08 09:47 - -from django.db import migrations - - -class Migration(migrations.Migration): - dependencies = [ - ("prompt_profile_manager", "0003_merge_20240125_0530"), - ] - - operations = [ - migrations.RenameField( - model_name="profilemanager", - old_name="retrival_strategy", - new_name="retrieval_strategy", - ), - ] diff --git a/backend/prompt_studio/prompt_profile_manager/migrations/0005_removed_converter_and_added_x2text_foreign_key.py b/backend/prompt_studio/prompt_profile_manager/migrations/0005_removed_converter_and_added_x2text_foreign_key.py deleted file mode 100644 index 482ad5f80..000000000 --- a/backend/prompt_studio/prompt_profile_manager/migrations/0005_removed_converter_and_added_x2text_foreign_key.py +++ /dev/null @@ -1,71 +0,0 @@ -# Generated by Django 4.2.1 on 2024-02-23 11:10 - -import json - 
-import django.db.models.deletion -from cryptography.fernet import Fernet -from django.conf import settings -from django.db import connection, migrations, models - - -def fill_with_default_x2text(apps, schema): - ProfileManager = apps.get_model("prompt_profile_manager", "ProfileManager") - AdapterInstance = apps.get_model("adapter_processor", "AdapterInstance") - - encryption_secret: str = settings.ENCRYPTION_KEY - f: Fernet = Fernet(encryption_secret.encode("utf-8")) - metadata = {"url": "http://unstract-unstructured-io:8000/general/v0/general"} - json_string = json.dumps(metadata) - metadata_b = f.encrypt(json_string.encode("utf-8")) - - adapter_instance = AdapterInstance( - adapter_name="DefaultX2text", - adapter_id="unstructuredcommunity|eeed506f-1875-457f-9101-846fc7115676", - adapter_type="X2TEXT", - adapter_metadata_b=metadata_b, - ) - adapter_instance.save() - ProfileManager.objects.filter(x2text__isnull=True).update(x2text=adapter_instance) - - -def reversal_x2text(*args): - """Reversal is NOOP since x2text is simply dropped during reverse.""" - - -def disable_triggers(apps, schema_editor): - with connection.cursor() as cursor: - cursor.execute("ALTER TABLE adapter_adapterinstance DISABLE TRIGGER ALL;") - - -class Migration(migrations.Migration): - dependencies = [ - ("adapter_processor", "0004_alter_adapterinstance_adapter_type"), - ( - "prompt_profile_manager", - "0004_rename_retrival_strategy_profilemanager_retrieval_strategy", - ), - ] - - operations = [ - migrations.RemoveField( - model_name="profilemanager", - name="pdf_to_text_converters", - ), - # migrations.RunPython( - # disable_triggers, reverse_code=migrations.RunPython.noop - # ), - migrations.AddField( - model_name="profilemanager", - name="x2text", - field=models.ForeignKey( - db_comment="Field to store the X2Text Adapter chosen by the user", - null=True, - on_delete=django.db.models.deletion.PROTECT, - related_name="profile_manager_x2text", - to="adapter_processor.adapterinstance", - ), - ), - # This function is not required for fresh instances as the profile manager table itself will be empty. - # The existing environments have already completed this migration. Hence it won't be necessary. 
- # migrations.RunPython(fill_with_default_x2text, reversal_x2text), - ] diff --git a/backend/prompt_studio/prompt_profile_manager/migrations/0006_alter_profilemanager_x2text.py b/backend/prompt_studio/prompt_profile_manager/migrations/0006_alter_profilemanager_x2text.py deleted file mode 100644 index d36581058..000000000 --- a/backend/prompt_studio/prompt_profile_manager/migrations/0006_alter_profilemanager_x2text.py +++ /dev/null @@ -1,37 +0,0 @@ -# Generated by Django 4.2.1 on 2024-02-23 17:02 - -import django.db.models.deletion -from django.db import connection, migrations, models - - -def enable_triggers(apps, schema_editor): - with connection.cursor() as cursor: - cursor.execute("ALTER TABLE adapter_adapterinstance ENABLE TRIGGER ALL;") - - -class Migration(migrations.Migration): - dependencies = [ - ("adapter_processor", "0004_alter_adapterinstance_adapter_type"), - ( - "prompt_profile_manager", - "0005_removed_converter_and_added_x2text_foreign_key", - ), - ] - - operations = [ - migrations.AlterField( - model_name="profilemanager", - name="x2text", - field=models.ForeignKey( - db_comment="Field to store the X2Text Adapter chosen by the user", - on_delete=django.db.models.deletion.PROTECT, - related_name="profile_manager_x2text", - to="adapter_processor.adapterinstance", - ), - ), - # Without superuser role this statement cannot be executed - # and this is als0 not recommended. - # migrations.RunPython( - # enable_triggers, - # ), - ] diff --git a/backend/prompt_studio/prompt_profile_manager/migrations/0007_profilemanager_is_default_and_more.py b/backend/prompt_studio/prompt_profile_manager/migrations/0007_profilemanager_is_default_and_more.py deleted file mode 100644 index e66aeb27b..000000000 --- a/backend/prompt_studio/prompt_profile_manager/migrations/0007_profilemanager_is_default_and_more.py +++ /dev/null @@ -1,53 +0,0 @@ -# Generated by Django 4.2.1 on 2024-03-08 11:44 - -import django.db.models.deletion -from django.db import migrations, models - - -class Migration(migrations.Migration): - dependencies = [ - ( - "prompt_studio_core", - "0007_remove_customtool_default_profile_and_more", - ), - ("prompt_profile_manager", "0006_alter_profilemanager_x2text"), - ] - - operations = [ - migrations.AddField( - model_name="profilemanager", - name="is_default", - field=models.BooleanField( - db_comment="Default LLM Profile used in prompt", default=False - ), - ), - migrations.AddField( - model_name="profilemanager", - name="is_summarize_llm", - field=models.BooleanField( - db_comment="Default LLM Profile used for summarizing", - default=False, - ), - ), - migrations.AddField( - model_name="profilemanager", - name="prompt_studio_tool", - field=models.ForeignKey( - null=True, - on_delete=django.db.models.deletion.PROTECT, - to="prompt_studio_core.customtool", - ), - ), - migrations.AlterField( - model_name="profilemanager", - name="profile_name", - field=models.TextField(), - ), - migrations.AddConstraint( - model_name="profilemanager", - constraint=models.UniqueConstraint( - fields=("prompt_studio_tool", "profile_name"), - name="unique_prompt_studio_tool_profile_name", - ), - ), - ] diff --git a/backend/prompt_studio/prompt_profile_manager/migrations/0008_profilemanager_migration.py b/backend/prompt_studio/prompt_profile_manager/migrations/0008_profilemanager_migration.py deleted file mode 100644 index e80273f38..000000000 --- a/backend/prompt_studio/prompt_profile_manager/migrations/0008_profilemanager_migration.py +++ /dev/null @@ -1,72 +0,0 @@ -# Generated by Django 4.2.1 on 
2024-03-08 06:30 - -from typing import Any - -from django.db import migrations - - -class Migration(migrations.Migration): - dependencies = [ - ( - "prompt_profile_manager", - "0007_profilemanager_is_default_and_more", - ), - ( - "prompt_studio", - "0006_alter_toolstudioprompt_prompt_key_and_more", - ), - ( - "prompt_studio_core", - "0007_remove_customtool_default_profile_and_more", - ), - ] - - def MigrateProfileManager(apps: Any, schema_editor: Any) -> None: - CustomTool = apps.get_model("prompt_studio_core", "CustomTool") - ProfileManager = apps.get_model("prompt_profile_manager", "ProfileManager") - ToolStudioPrompt = apps.get_model("prompt_studio", "ToolStudioPrompt") - - # Iterate over prompt studio tools - custom_tools = CustomTool.objects.all() - for custom_tool in custom_tools: - custom_prompts = ToolStudioPrompt.objects.filter( - tool_id=custom_tool - ).distinct("profile_manager") - """Iterate over prompts inside specific tool and return the once - which has distinct profile manager.""" - - for index, custom_prompt in enumerate(custom_prompts): - """There can be scenario where individual prompts wont have a - profile manager attached.""" - if not custom_prompt.profile_manager: - continue - profile_manager = ProfileManager.objects.get( - pk=custom_prompt.profile_manager.profile_id - ) - """Check this profile manager to see if the profile manager is - tagged to any prompt tool If its none, meaning profile manager - created was global and before migration.""" - - if not profile_manager.prompt_studio_tool: - # Copy Profile manager Id before replicating - profile_id = profile_manager.profile_id - # Replicate those profile manager - replicated_profile_manager = profile_manager - replicated_profile_manager.profile_id = None - replicated_profile_manager.profile_name = f"Profile_{index+1}" - """Attach the replicated profile manager to the - corresponding prompt tool.""" - replicated_profile_manager.prompt_studio_tool = custom_tool - replicated_profile_manager.save() - """Custom prompts where the profile manager and tool - associated replace it with replicated profile manager.""" - - ToolStudioPrompt.objects.filter( - profile_manager=profile_id, tool_id=custom_tool - ).update(profile_manager=replicated_profile_manager) - - operations = [ - migrations.RunPython( - MigrateProfileManager, reverse_code=migrations.RunPython.noop - ), - ] diff --git a/backend/prompt_studio/prompt_profile_manager/migrations/0009_alter_profilemanager_prompt_studio_tool.py b/backend/prompt_studio/prompt_profile_manager/migrations/0009_alter_profilemanager_prompt_studio_tool.py deleted file mode 100644 index bec9bd155..000000000 --- a/backend/prompt_studio/prompt_profile_manager/migrations/0009_alter_profilemanager_prompt_studio_tool.py +++ /dev/null @@ -1,26 +0,0 @@ -# Generated by Django 4.2.1 on 2024-03-18 07:58 - -import django.db.models.deletion -from django.db import migrations, models - - -class Migration(migrations.Migration): - dependencies = [ - ( - "prompt_studio_core", - "0008_customtool_exclude_failed_customtool_monitor_llm", - ), - ("prompt_profile_manager", "0008_profilemanager_migration"), - ] - - operations = [ - migrations.AlterField( - model_name="profilemanager", - name="prompt_studio_tool", - field=models.ForeignKey( - null=True, - on_delete=django.db.models.deletion.CASCADE, - to="prompt_studio_core.customtool", - ), - ), - ] diff --git a/backend/prompt_studio/prompt_profile_manager/migrations/0010_alter_profilemanager_retrieval_strategy_and_more.py 
b/backend/prompt_studio/prompt_profile_manager/migrations/0010_alter_profilemanager_retrieval_strategy_and_more.py deleted file mode 100644 index 301d17ffe..000000000 --- a/backend/prompt_studio/prompt_profile_manager/migrations/0010_alter_profilemanager_retrieval_strategy_and_more.py +++ /dev/null @@ -1,33 +0,0 @@ -# Generated by Django 4.2.1 on 2024-03-21 08:36 - -from django.db import migrations, models - - -class Migration(migrations.Migration): - dependencies = [ - ( - "prompt_profile_manager", - "0009_alter_profilemanager_prompt_studio_tool", - ), - ] - - operations = [ - migrations.AlterField( - model_name="profilemanager", - name="retrieval_strategy", - field=models.TextField( - blank=True, - choices=[("simple", "Simple retrieval")], - db_comment="Field to store the retrieval strategy for prompts", - ), - ), - migrations.AlterField( - model_name="profilemanager", - name="similarity_top_k", - field=models.IntegerField( - blank=True, - db_comment="Field to store number of top embeddings to take into context", - null=True, - ), - ), - ] diff --git a/backend/prompt_studio/prompt_profile_manager/migrations/0011_alter_profilemanager_retrieval_strategy.py b/backend/prompt_studio/prompt_profile_manager/migrations/0011_alter_profilemanager_retrieval_strategy.py deleted file mode 100644 index defdb5c34..000000000 --- a/backend/prompt_studio/prompt_profile_manager/migrations/0011_alter_profilemanager_retrieval_strategy.py +++ /dev/null @@ -1,28 +0,0 @@ -# Generated by Django 4.2.1 on 2024-05-14 09:58 - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ( - "prompt_profile_manager", - "0010_alter_profilemanager_retrieval_strategy_and_more", - ), - ] - - operations = [ - migrations.AlterField( - model_name="profilemanager", - name="retrieval_strategy", - field=models.TextField( - blank=True, - choices=[ - ("simple", "Simple retrieval"), - ("subquestion", "Subquestion retrieval"), - ], - db_comment="Field to store the retrieval strategy for prompts", - ), - ), - ] diff --git a/backend/prompt_studio/prompt_profile_manager/migrations/__init__.py b/backend/prompt_studio/prompt_profile_manager/migrations/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/backend/prompt_studio/prompt_profile_manager/models.py b/backend/prompt_studio/prompt_profile_manager/models.py deleted file mode 100644 index 7dc8152c4..000000000 --- a/backend/prompt_studio/prompt_profile_manager/models.py +++ /dev/null @@ -1,112 +0,0 @@ -import uuid - -from account.models import User -from adapter_processor.models import AdapterInstance -from django.db import models -from prompt_studio.prompt_studio_core.exceptions import DefaultProfileError -from prompt_studio.prompt_studio_core.models import CustomTool -from utils.models.base_model import BaseModel - - -class ProfileManager(BaseModel): - """Model to store the LLM Triad management details for Prompt.""" - - class RetrievalStrategy(models.TextChoices): - SIMPLE = "simple", "Simple retrieval" - SUBQUESTION = "subquestion", "Subquestion retrieval" - - profile_id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False) - profile_name = models.TextField(blank=False) - vector_store = models.ForeignKey( - AdapterInstance, - db_comment="Field to store the chosen vector store.", - blank=False, - null=False, - on_delete=models.PROTECT, - related_name="profile_manager_vector", - ) - embedding_model = models.ForeignKey( - AdapterInstance, - blank=False, - null=False, - 
on_delete=models.PROTECT, - related_name="profile_manager_embedding", - ) - llm = models.ForeignKey( - AdapterInstance, - db_comment="Field to store the LLM chosen by the user", - blank=False, - null=False, - on_delete=models.PROTECT, - related_name="profile_manager_llm", - ) - x2text = models.ForeignKey( - AdapterInstance, - db_comment="Field to store the X2Text Adapter chosen by the user", - blank=False, - null=False, - on_delete=models.PROTECT, - related_name="profile_manager_x2text", - ) - chunk_size = models.IntegerField(null=True, blank=True) - chunk_overlap = models.IntegerField(null=True, blank=True) - reindex = models.BooleanField(default=False) - retrieval_strategy = models.TextField( - choices=RetrievalStrategy.choices, - blank=True, - db_comment="Field to store the retrieval strategy for prompts", - ) - similarity_top_k = models.IntegerField( - blank=True, - null=True, - db_comment="Field to store number of top embeddings to take into context", # noqa: E501 - ) - section = models.TextField( - blank=True, null=True, db_comment="Field to store limit to section" - ) - created_by = models.ForeignKey( - User, - on_delete=models.SET_NULL, - related_name="profile_created_by", - null=True, - blank=True, - editable=False, - ) - modified_by = models.ForeignKey( - User, - on_delete=models.SET_NULL, - related_name="profile_modified_by", - null=True, - blank=True, - editable=False, - ) - - prompt_studio_tool = models.ForeignKey( - CustomTool, on_delete=models.CASCADE, null=True - ) - is_default = models.BooleanField( - default=False, - db_comment="Default LLM Profile used in prompt", - ) - - is_summarize_llm = models.BooleanField( - default=False, - db_comment="Default LLM Profile used for summarizing", - ) - - class Meta: - constraints = [ - models.UniqueConstraint( - fields=["prompt_studio_tool", "profile_name"], - name="unique_prompt_studio_tool_profile_name", - ), - ] - - @staticmethod - def get_default_llm_profile(tool: CustomTool) -> "ProfileManager": - try: - return ProfileManager.objects.get( # type: ignore - prompt_studio_tool=tool, is_default=True - ) - except ProfileManager.DoesNotExist: - raise DefaultProfileError diff --git a/backend/prompt_studio/prompt_profile_manager/profile_manager_helper.py b/backend/prompt_studio/prompt_profile_manager/profile_manager_helper.py deleted file mode 100644 index 68783b551..000000000 --- a/backend/prompt_studio/prompt_profile_manager/profile_manager_helper.py +++ /dev/null @@ -1,11 +0,0 @@ -from prompt_studio.prompt_profile_manager.models import ProfileManager - - -class ProfileManagerHelper: - - @classmethod - def get_profile_manager(cls, profile_manager_id: str) -> ProfileManager: - try: - return ProfileManager.objects.get(profile_id=profile_manager_id) - except ProfileManager.DoesNotExist: - raise ValueError("ProfileManager does not exist.") diff --git a/backend/prompt_studio/prompt_profile_manager/serializers.py b/backend/prompt_studio/prompt_profile_manager/serializers.py deleted file mode 100644 index 4d4753561..000000000 --- a/backend/prompt_studio/prompt_profile_manager/serializers.py +++ /dev/null @@ -1,40 +0,0 @@ -import logging - -from adapter_processor.adapter_processor import AdapterProcessor -from prompt_studio.prompt_profile_manager.constants import ProfileManagerKeys - -from backend.serializers import AuditSerializer - -from .models import ProfileManager - -logger = logging.getLogger(__name__) - - -class ProfileManagerSerializer(AuditSerializer): - class Meta: - model = ProfileManager - fields = "__all__" - - def 
to_representation(self, instance): # type: ignore - rep: dict[str, str] = super().to_representation(instance) - llm = rep[ProfileManagerKeys.LLM] - embedding = rep[ProfileManagerKeys.EMBEDDING_MODEL] - vector_db = rep[ProfileManagerKeys.VECTOR_STORE] - x2text = rep[ProfileManagerKeys.X2TEXT] - if llm: - rep[ProfileManagerKeys.LLM] = AdapterProcessor.get_adapter_instance_by_id( - llm - ) - if embedding: - rep[ProfileManagerKeys.EMBEDDING_MODEL] = ( - AdapterProcessor.get_adapter_instance_by_id(embedding) - ) - if vector_db: - rep[ProfileManagerKeys.VECTOR_STORE] = ( - AdapterProcessor.get_adapter_instance_by_id(vector_db) - ) - if x2text: - rep[ProfileManagerKeys.X2TEXT] = ( - AdapterProcessor.get_adapter_instance_by_id(x2text) - ) - return rep diff --git a/backend/prompt_studio/prompt_profile_manager/urls.py b/backend/prompt_studio/prompt_profile_manager/urls.py deleted file mode 100644 index ae95f1fb9..000000000 --- a/backend/prompt_studio/prompt_profile_manager/urls.py +++ /dev/null @@ -1,24 +0,0 @@ -from django.urls import path -from rest_framework.urlpatterns import format_suffix_patterns - -from .views import ProfileManagerView - -profile_manager_detail = ProfileManagerView.as_view( - { - "get": "retrieve", - "put": "update", - "patch": "partial_update", - "delete": "destroy", - } -) - - -urlpatterns = format_suffix_patterns( - [ - path( - "profile-manager//", - profile_manager_detail, - name="profile-manager-detail", - ), - ] -) diff --git a/backend/prompt_studio/prompt_profile_manager/views.py b/backend/prompt_studio/prompt_profile_manager/views.py deleted file mode 100644 index e42ed100b..000000000 --- a/backend/prompt_studio/prompt_profile_manager/views.py +++ /dev/null @@ -1,50 +0,0 @@ -from typing import Any, Optional - -from account.custom_exceptions import DuplicateData -from django.db import IntegrityError -from django.db.models import QuerySet -from django.http import HttpRequest -from permissions.permission import IsOwner -from prompt_studio.prompt_profile_manager.constants import ( - ProfileManagerErrors, - ProfileManagerKeys, -) -from prompt_studio.prompt_profile_manager.serializers import ProfileManagerSerializer -from rest_framework import status, viewsets -from rest_framework.response import Response -from rest_framework.versioning import URLPathVersioning -from utils.filtering import FilterHelper - -from .models import ProfileManager - - -class ProfileManagerView(viewsets.ModelViewSet): - """Viewset to handle all Custom tool related operations.""" - - versioning_class = URLPathVersioning - permission_classes = [IsOwner] - serializer_class = ProfileManagerSerializer - - def get_queryset(self) -> Optional[QuerySet]: - filter_args = FilterHelper.build_filter_args( - self.request, - ProfileManagerKeys.CREATED_BY, - ) - if filter_args: - queryset = ProfileManager.objects.filter(**filter_args) - else: - queryset = ProfileManager.objects.all() - return queryset - - def create( - self, request: HttpRequest, *args: tuple[Any], **kwargs: dict[str, Any] - ) -> Response: - serializer: ProfileManagerSerializer = self.get_serializer(data=request.data) - # Overriding default exception behaviour - # TO DO : Handle model related exceptions. 
- serializer.is_valid(raise_exception=True) - try: - self.perform_create(serializer) - except IntegrityError: - raise DuplicateData(ProfileManagerErrors.PROFILE_NAME_EXISTS) - return Response(serializer.data, status=status.HTTP_201_CREATED) diff --git a/backend/prompt_studio/prompt_studio/__init__.py b/backend/prompt_studio/prompt_studio/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/backend/prompt_studio/prompt_studio/admin.py b/backend/prompt_studio/prompt_studio/admin.py deleted file mode 100644 index bbcd76c6e..000000000 --- a/backend/prompt_studio/prompt_studio/admin.py +++ /dev/null @@ -1,5 +0,0 @@ -from django.contrib import admin - -from .models import ToolStudioPrompt - -admin.site.register(ToolStudioPrompt) diff --git a/backend/prompt_studio/prompt_studio/apps.py b/backend/prompt_studio/prompt_studio/apps.py deleted file mode 100644 index 9e30ea8b3..000000000 --- a/backend/prompt_studio/prompt_studio/apps.py +++ /dev/null @@ -1,5 +0,0 @@ -from django.apps import AppConfig - - -class ToolStudioPrompt(AppConfig): - name = "prompt_studio.prompt_studio" diff --git a/backend/prompt_studio/prompt_studio/constants.py b/backend/prompt_studio/prompt_studio/constants.py deleted file mode 100644 index 6554a9f8c..000000000 --- a/backend/prompt_studio/prompt_studio/constants.py +++ /dev/null @@ -1,32 +0,0 @@ -class ToolStudioPromptKeys: - CREATED_BY = "created_by" - TOOL_ID = "tool_id" - NUMBER = "Number" - FLOAT = "Float" - PG_VECTOR = "Postgres pg_vector" - ANSWERS = "answers" - UNIQUE_FILE_ID = "unique_file_id" - ID = "id" - FILE_NAME = "file_name" - UNDEFINED = "undefined" - ACTIVE = "active" - PROMPT_KEY = "prompt_key" - EVAL_METRIC_PREFIX = "eval_" - EVAL_RESULT_DELIM = "__" - SEQUENCE_NUMBER = "sequence_number" - START_SEQUENCE_NUMBER = "start_sequence_number" - END_SEQUENCE_NUMBER = "end_sequence_number" - PROMPT_ID = "prompt_id" - - -class ToolStudioPromptErrors: - SERIALIZATION_FAILED = "Data Serialization Failed." - DUPLICATE_API = "It appears that a duplicate call may have been made." - PROMPT_NAME_EXISTS = "Prompt with the name already exists" - - -class LogLevels: - INFO = "INFO" - ERROR = "ERROR" - DEBUG = "DEBUG" - RUN = "RUN" diff --git a/backend/prompt_studio/prompt_studio/controller.py b/backend/prompt_studio/prompt_studio/controller.py deleted file mode 100644 index e3de0dd30..000000000 --- a/backend/prompt_studio/prompt_studio/controller.py +++ /dev/null @@ -1,60 +0,0 @@ -import logging - -from django.db import models -from prompt_studio.prompt_studio.constants import ToolStudioPromptKeys -from prompt_studio.prompt_studio.helper import PromptStudioHelper -from prompt_studio.prompt_studio.models import ToolStudioPrompt -from prompt_studio.prompt_studio.serializers import ReorderPromptsSerializer -from rest_framework import status -from rest_framework.request import Request -from rest_framework.response import Response - -logger = logging.getLogger(__name__) - - -class PromptStudioController: - def reorder_prompts(self, request: Request, prompt_model: models.Model) -> Response: - """Reorder the sequence of prompts based on the start and end sequence - numbers. - - This action handles the reordering of prompts by updating their sequence - numbers. It increments or decrements the sequence numbers of the relevant - prompts to reflect the new order. If the start and end sequence numbers - are equal, it returns a bad request response. - - Args: - request (Request): The HTTP request object containing the data to - reorder prompts. 
- - Returns: - Response: A Response object with the status of the reordering operation. - """ - try: - # Validate request data - serializer = ReorderPromptsSerializer(data=request.data) - serializer.is_valid(raise_exception=True) - - # Extract validated data from the serializer - start_sequence_number = serializer.validated_data.get( - ToolStudioPromptKeys.START_SEQUENCE_NUMBER - ) - end_sequence_number = serializer.validated_data.get( - ToolStudioPromptKeys.END_SEQUENCE_NUMBER - ) - prompt_id = serializer.validated_data.get(ToolStudioPromptKeys.PROMPT_ID) - - filtered_prompts_data = PromptStudioHelper.reorder_prompts_helper( - prompt_id=prompt_id, - start_sequence_number=start_sequence_number, - end_sequence_number=end_sequence_number, - prompt_model=prompt_model, - ) - - logger.info("Re-ordering completed successfully.") - return Response(status=status.HTTP_200_OK, data=filtered_prompts_data) - - except ToolStudioPrompt.DoesNotExist: - logger.error(f"Prompt with ID {prompt_id} not found.") - return Response( - status=status.HTTP_404_NOT_FOUND, data={"detail": "Prompt not found."} - ) diff --git a/backend/prompt_studio/prompt_studio/exceptions.py b/backend/prompt_studio/prompt_studio/exceptions.py deleted file mode 100644 index c78c3a740..000000000 --- a/backend/prompt_studio/prompt_studio/exceptions.py +++ /dev/null @@ -1,16 +0,0 @@ -from rest_framework.exceptions import APIException - - -class IndexingError(APIException): - status_code = 400 - default_detail = "Error while indexing file" - - -class AnswerFetchError(APIException): - status_code = 400 - default_detail = "Error occured while fetching response for the prompt" - - -class ToolNotValid(APIException): - status_code = 400 - default_detail = "Custom tool is not valid." diff --git a/backend/prompt_studio/prompt_studio/helper.py b/backend/prompt_studio/prompt_studio/helper.py deleted file mode 100644 index 8d70802f2..000000000 --- a/backend/prompt_studio/prompt_studio/helper.py +++ /dev/null @@ -1,114 +0,0 @@ -import logging - -from django.db import models - -logger = logging.getLogger(__name__) - - -class PromptStudioHelper: - @staticmethod - def reorder_prompts_helper( - prompt_id: str, - start_sequence_number: int, - end_sequence_number: int, - prompt_model: models.Model, - ) -> list[dict[str, int]]: - """Helper method to reorder prompts based on sequence numbers. - - Args: - prompt_id (str): The ID of the prompt to be reordered. - start_sequence_number (int): The initial sequence number of the prompt. - end_sequence_number (int): The new sequence number of the prompt. - is_sps (bool): Flag to determine the prompt model to use. - - Returns: - list[dict[str, int]]: A list of updated prompt data with their IDs - and new sequence numbers. - """ - - prompt_instance = prompt_model.objects.get(pk=prompt_id) - tool_id = prompt_instance.tool_id - - # Determine the direction of sequence adjustment based on start and - # end sequence numbers - if start_sequence_number < end_sequence_number: - logger.info( - "Start sequence number is less than end sequence number. " - "Decrementing sequence numbers." - ) - filters = { - "sequence_number__gt": start_sequence_number, - "sequence_number__lte": end_sequence_number, - "tool_id": tool_id, - } - increment = False - elif start_sequence_number > end_sequence_number: - logger.info( - "Start sequence number is greater than end sequence number. " - "Incrementing sequence numbers." 
- ) - filters = { - "sequence_number__lt": start_sequence_number, - "sequence_number__gte": end_sequence_number, - "tool_id": tool_id, - } - increment = True - - # Update sequence numbers and get filtered prompt data - filtered_prompts_data = PromptStudioHelper.update_sequence_numbers( - filters, increment, prompt_model - ) - - # Update the sequence number of the moved prompt - prompt_instance.sequence_number = end_sequence_number - prompt_instance.save() - - # Append the updated prompt instance data to the response - filtered_prompts_data.append( - { - "id": prompt_instance.prompt_id, - "sequence_number": prompt_instance.sequence_number, - } - ) - - return filtered_prompts_data - - @staticmethod - def update_sequence_numbers( - filters: dict, increment: bool, prompt_model: models.Model - ) -> list[dict[str, int]]: - """Update the sequence numbers for prompts based on the provided - filters and increment flag. - - Args: - filters (dict): The filter criteria for selecting prompts. - increment (bool): Whether to increment (True) or decrement (False) - the sequence numbers. - prompt_model: The model class for the prompts - (either ToolStudioPrompt or SPSPrompt). - - Returns: - list[dict[str, int]]: A list of updated prompt data with their IDs - and new sequence numbers. - """ - filtered_prompts = prompt_model.objects.filter(**filters) - - # List to hold updated prompt data - filtered_prompts_data = [] - - # Prepare updates and collect data - for prompt in filtered_prompts: - prompt.sequence_number += 1 if increment else -1 - - # Append prompt data to the list - filtered_prompts_data.append( - { - "id": prompt.prompt_id, - "sequence_number": prompt.sequence_number, - } - ) - - # Bulk update the sequence numbers - prompt_model.objects.bulk_update(filtered_prompts, ["sequence_number"]) - - return filtered_prompts_data diff --git a/backend/prompt_studio/prompt_studio/migrations/0001_initial.py b/backend/prompt_studio/prompt_studio/migrations/0001_initial.py deleted file mode 100644 index 0fb94aab1..000000000 --- a/backend/prompt_studio/prompt_studio/migrations/0001_initial.py +++ /dev/null @@ -1,145 +0,0 @@ -# Generated by Django 4.2.1 on 2024-01-20 08:04 - -import uuid - -import django.db.models.deletion -from django.conf import settings -from django.db import migrations, models - - -class Migration(migrations.Migration): - initial = True - - dependencies = [ - migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ("prompt_studio_core", "0001_initial"), - ("prompt_profile_manager", "0001_initial"), - ] - - operations = [ - migrations.CreateModel( - name="ToolStudioPrompt", - fields=[ - ("created_at", models.DateTimeField(auto_now_add=True)), - ("updated_at", models.DateTimeField(auto_now=True)), - ( - "prompt_id", - models.UUIDField( - default=uuid.uuid4, - editable=False, - primary_key=True, - serialize=False, - ), - ), - ( - "prompt_key", - models.TextField( - db_comment="Field to store the prompt key", unique=True - ), - ), - ( - "enforce_type", - models.TextField( - blank=True, - choices=[ - ("Text", "Response sent as Text"), - ("number", "Response sent as number"), - ("email", "Response sent as email"), - ("date", "Response sent as date"), - ("boolean", "Response sent as boolean"), - ("json", "Response sent as json"), - ], - db_comment="Field to store the type in which the response to be returned.", - ), - ), - ( - "prompt", - models.TextField(db_comment="Field to store the prompt"), - ), - ("sequence_number", models.IntegerField(blank=True, null=True)), - ( - "prompt_type", - 
models.TextField( - blank=True, - choices=[ - ("PROMPT", "Response sent as Text"), - ("NOTES", "Response sent as float"), - ], - db_comment="Field to store the type of the input prompt", - ), - ), - ("output", models.TextField(blank=True)), - ( - "assert_prompt", - models.TextField( - blank=True, - db_comment="Field to store the asserted prompt", - null=True, - ), - ), - ( - "assertion_failure_prompt", - models.TextField( - blank=True, - db_comment="Field to store the prompt key", - null=True, - ), - ), - ("is_assert", models.BooleanField(default=False)), - ("active", models.BooleanField(default=True)), - ( - "output_metadata", - models.JSONField( - db_column="output_metadata", - db_comment="JSON adapter metadata for the FE to load the pagination", - default=dict, - ), - ), - ( - "created_by", - models.ForeignKey( - blank=True, - editable=False, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - related_name="prompt_created_by", - to=settings.AUTH_USER_MODEL, - ), - ), - ( - "modified_by", - models.ForeignKey( - blank=True, - editable=False, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - related_name="prompt_modified_by", - to=settings.AUTH_USER_MODEL, - ), - ), - ( - "profile_manager", - models.ForeignKey( - blank=True, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - related_name="prompt_profile_manager", - to="prompt_profile_manager.profilemanager", - ), - ), - ( - "tool_id", - models.ForeignKey( - blank=True, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - related_name="mapped_prompt", - to="prompt_studio_core.customtool", - ), - ), - ], - options={ - "abstract": False, - }, - ), - ] diff --git a/backend/prompt_studio/prompt_studio/migrations/0002_prompt_eval_metrics.py b/backend/prompt_studio/prompt_studio/migrations/0002_prompt_eval_metrics.py deleted file mode 100644 index 3f9f7fb80..000000000 --- a/backend/prompt_studio/prompt_studio/migrations/0002_prompt_eval_metrics.py +++ /dev/null @@ -1,48 +0,0 @@ -# Generated by Django 4.2.1 on 2024-01-22 18:10 - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("prompt_studio", "0001_initial"), - ] - - operations = [ - migrations.AddField( - model_name="toolstudioprompt", - name="eval_guidance_completeness", - field=models.BooleanField(default=True), - ), - migrations.AddField( - model_name="toolstudioprompt", - name="eval_guidance_toxicity", - field=models.BooleanField(default=True), - ), - migrations.AddField( - model_name="toolstudioprompt", - name="eval_quality_correctness", - field=models.BooleanField(default=True), - ), - migrations.AddField( - model_name="toolstudioprompt", - name="eval_quality_faithfulness", - field=models.BooleanField(default=True), - ), - migrations.AddField( - model_name="toolstudioprompt", - name="eval_quality_relevance", - field=models.BooleanField(default=True), - ), - migrations.AddField( - model_name="toolstudioprompt", - name="eval_security_pii", - field=models.BooleanField(default=True), - ), - migrations.AddField( - model_name="toolstudioprompt", - name="evaluate", - field=models.BooleanField(default=True), - ), - ] diff --git a/backend/prompt_studio/prompt_studio/migrations/0003_remove_toolstudioprompt_updated_at_and_more.py b/backend/prompt_studio/prompt_studio/migrations/0003_remove_toolstudioprompt_updated_at_and_more.py deleted file mode 100644 index 74a6713c6..000000000 --- a/backend/prompt_studio/prompt_studio/migrations/0003_remove_toolstudioprompt_updated_at_and_more.py +++ 
/dev/null @@ -1,21 +0,0 @@ -# Generated by Django 4.2.1 on 2024-01-23 19:02 - -from django.db import migrations, models - - -class Migration(migrations.Migration): - dependencies = [ - ("prompt_studio", "0002_prompt_eval_metrics"), - ] - - operations = [ - migrations.RemoveField( - model_name="toolstudioprompt", - name="updated_at", - ), - migrations.AddField( - model_name="toolstudioprompt", - name="modified_at", - field=models.DateTimeField(auto_now=True), - ), - ] diff --git a/backend/prompt_studio/prompt_studio/migrations/0004_alter_toolstudioprompt_prompt.py b/backend/prompt_studio/prompt_studio/migrations/0004_alter_toolstudioprompt_prompt.py deleted file mode 100644 index 5fe3253fe..000000000 --- a/backend/prompt_studio/prompt_studio/migrations/0004_alter_toolstudioprompt_prompt.py +++ /dev/null @@ -1,17 +0,0 @@ -# Generated by Django 4.2.1 on 2024-02-13 11:08 - -from django.db import migrations, models - - -class Migration(migrations.Migration): - dependencies = [ - ("prompt_studio", "0003_remove_toolstudioprompt_updated_at_and_more"), - ] - - operations = [ - migrations.AlterField( - model_name="toolstudioprompt", - name="prompt", - field=models.TextField(blank=True, db_comment="Field to store the prompt"), - ), - ] diff --git a/backend/prompt_studio/prompt_studio/migrations/0005_alter_toolstudioprompt_enforce_type.py b/backend/prompt_studio/prompt_studio/migrations/0005_alter_toolstudioprompt_enforce_type.py deleted file mode 100644 index 12f6fa9d1..000000000 --- a/backend/prompt_studio/prompt_studio/migrations/0005_alter_toolstudioprompt_enforce_type.py +++ /dev/null @@ -1,29 +0,0 @@ -# Generated by Django 4.2.1 on 2024-02-29 10:30 - -from django.db import migrations, models - - -class Migration(migrations.Migration): - dependencies = [ - ("prompt_studio", "0004_alter_toolstudioprompt_prompt"), - ] - - operations = [ - migrations.AlterField( - model_name="toolstudioprompt", - name="enforce_type", - field=models.TextField( - blank=True, - choices=[ - ("Text", "Response sent as Text"), - ("number", "Response sent as number"), - ("email", "Response sent as email"), - ("date", "Response sent as date"), - ("boolean", "Response sent as boolean"), - ("json", "Response sent as json"), - ], - db_comment="Field to store the type in which the response to be returned.", - default="Text", - ), - ), - ] diff --git a/backend/prompt_studio/prompt_studio/migrations/0006_alter_toolstudioprompt_prompt_key_and_more.py b/backend/prompt_studio/prompt_studio/migrations/0006_alter_toolstudioprompt_prompt_key_and_more.py deleted file mode 100644 index eb695e3e9..000000000 --- a/backend/prompt_studio/prompt_studio/migrations/0006_alter_toolstudioprompt_prompt_key_and_more.py +++ /dev/null @@ -1,24 +0,0 @@ -# Generated by Django 4.2.1 on 2024-03-04 07:26 - -from django.db import migrations, models - - -class Migration(migrations.Migration): - dependencies = [ - ("prompt_studio", "0005_alter_toolstudioprompt_enforce_type"), - ] - - operations = [ - migrations.AlterField( - model_name="toolstudioprompt", - name="prompt_key", - field=models.TextField(db_comment="Field to store the prompt key"), - ), - migrations.AddConstraint( - model_name="toolstudioprompt", - constraint=models.UniqueConstraint( - fields=("prompt_key", "tool_id"), - name="unique_prompt_key_tool_id", - ), - ), - ] diff --git a/backend/prompt_studio/prompt_studio/migrations/0007_alter_toolstudioprompt_enforce_type.py b/backend/prompt_studio/prompt_studio/migrations/0007_alter_toolstudioprompt_enforce_type.py deleted file mode 100644 index 
3a83c4919..000000000 --- a/backend/prompt_studio/prompt_studio/migrations/0007_alter_toolstudioprompt_enforce_type.py +++ /dev/null @@ -1,31 +0,0 @@ -# Generated by Django 4.2.1 on 2024-08-07 14:20 - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("prompt_studio", "0006_alter_toolstudioprompt_prompt_key_and_more"), - ] - - operations = [ - migrations.AlterField( - model_name="toolstudioprompt", - name="enforce_type", - field=models.TextField( - blank=True, - choices=[ - ("Text", "Response sent as Text"), - ("number", "Response sent as number"), - ("email", "Response sent as email"), - ("date", "Response sent as date"), - ("boolean", "Response sent as boolean"), - ("json", "Response sent as json"), - ("table", "Response sent as table"), - ], - db_comment="Field to store the type in which the response to be returned.", - default="Text", - ), - ), - ] diff --git a/backend/prompt_studio/prompt_studio/migrations/0008_alter_toolstudioprompt_enforce_type.py b/backend/prompt_studio/prompt_studio/migrations/0008_alter_toolstudioprompt_enforce_type.py deleted file mode 100644 index 5e6012a54..000000000 --- a/backend/prompt_studio/prompt_studio/migrations/0008_alter_toolstudioprompt_enforce_type.py +++ /dev/null @@ -1,35 +0,0 @@ -# Generated by Django 4.2.1 on 2024-09-26 08:38 - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("prompt_studio", "0007_alter_toolstudioprompt_enforce_type"), - ] - - operations = [ - migrations.AlterField( - model_name="toolstudioprompt", - name="enforce_type", - field=models.TextField( - blank=True, - choices=[ - ("Text", "Response sent as Text"), - ("number", "Response sent as number"), - ("email", "Response sent as email"), - ("date", "Response sent as date"), - ("boolean", "Response sent as boolean"), - ("json", "Response sent as json"), - ("table", "Response sent as table"), - ( - "record", - "Response sent for records. Entries of records are list of logical and organized individual entities with distint values", - ), - ], - db_comment="Field to store the type in which the response to be returned.", - default="Text", - ), - ), - ] diff --git a/backend/prompt_studio/prompt_studio/migrations/__init__.py b/backend/prompt_studio/prompt_studio/migrations/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/backend/prompt_studio/prompt_studio/models.py b/backend/prompt_studio/prompt_studio/models.py deleted file mode 100644 index 5cc054efb..000000000 --- a/backend/prompt_studio/prompt_studio/models.py +++ /dev/null @@ -1,131 +0,0 @@ -import uuid - -from account.models import User -from django.db import models -from prompt_studio.prompt_profile_manager.models import ProfileManager -from prompt_studio.prompt_studio_core.models import CustomTool -from utils.models.base_model import BaseModel - - -class ToolStudioPrompt(BaseModel): - """Model class while store Prompt data for Custom Tool Studio. - - It has Many to one relation with CustomTool for ToolStudio. - """ - - class EnforceType(models.TextChoices): - TEXT = "Text", "Response sent as Text" - NUMBER = "number", "Response sent as number" - EMAIL = "email", "Response sent as email" - DATE = "date", "Response sent as date" - BOOLEAN = "boolean", "Response sent as boolean" - JSON = "json", "Response sent as json" - TABLE = "table", "Response sent as table" - RECORD = "record", ( - "Response sent for records. 
" - "Entries of records are list of " - "logical and organized individual " - "entities with distint values" - ) - - class PromptType(models.TextChoices): - PROMPT = "PROMPT", "Response sent as Text" - NOTES = "NOTES", "Response sent as float" - - class Mode(models.TextChoices): - DEFAULT = "Default", "Default choice for output" - - prompt_id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False) - prompt_key = models.TextField( - blank=False, - db_comment="Field to store the prompt key", - ) - enforce_type = models.TextField( - blank=True, - db_comment="Field to store the type in \ - which the response to be returned.", - choices=EnforceType.choices, - default=EnforceType.TEXT, - ) - prompt = models.TextField( - blank=True, db_comment="Field to store the prompt", unique=False - ) - tool_id = models.ForeignKey( - CustomTool, - on_delete=models.SET_NULL, - related_name="mapped_prompt", - null=True, - blank=True, - ) - sequence_number = models.IntegerField(null=True, blank=True) - prompt_type = models.TextField( - blank=True, - db_comment="Field to store the type of the input prompt", - choices=PromptType.choices, - ) - profile_manager = models.ForeignKey( - ProfileManager, - on_delete=models.SET_NULL, - related_name="prompt_profile_manager", - null=True, - blank=True, - ) - output = models.TextField(blank=True) - # TODO: Remove below 3 fields related to assertion - assert_prompt = models.TextField( - blank=True, - null=True, - db_comment="Field to store the asserted prompt", - unique=False, - ) - assertion_failure_prompt = models.TextField( - blank=True, - null=True, - db_comment="Field to store the prompt key", - unique=False, - ) - is_assert = models.BooleanField(default=False) - active = models.BooleanField(default=True, null=False, blank=False) - output_metadata = models.JSONField( - db_column="output_metadata", - null=False, - blank=False, - default=dict, - db_comment="JSON adapter metadata for the FE to load the pagination", - ) - created_by = models.ForeignKey( - User, - on_delete=models.SET_NULL, - related_name="prompt_created_by", - null=True, - blank=True, - editable=False, - ) - modified_by = models.ForeignKey( - User, - on_delete=models.SET_NULL, - related_name="prompt_modified_by", - null=True, - blank=True, - editable=False, - ) - # Eval settings for the prompt - # NOTE: - # - Field name format is eval__ - # - Metric name alone should be UNIQUE across all eval metrics - evaluate = models.BooleanField(default=True) - eval_quality_faithfulness = models.BooleanField(default=True) - eval_quality_correctness = models.BooleanField(default=True) - eval_quality_relevance = models.BooleanField(default=True) - eval_security_pii = models.BooleanField(default=True) - eval_guidance_toxicity = models.BooleanField(default=True) - eval_guidance_completeness = models.BooleanField(default=True) - # - - class Meta: - constraints = [ - models.UniqueConstraint( - fields=["prompt_key", "tool_id"], - name="unique_prompt_key_tool_id", - ), - ] diff --git a/backend/prompt_studio/prompt_studio/serializers.py b/backend/prompt_studio/prompt_studio/serializers.py deleted file mode 100644 index e1adddc33..000000000 --- a/backend/prompt_studio/prompt_studio/serializers.py +++ /dev/null @@ -1,33 +0,0 @@ -from rest_framework import serializers - -from backend.serializers import AuditSerializer - -from .models import ToolStudioPrompt - - -class ToolStudioPromptSerializer(AuditSerializer): - class Meta: - model = ToolStudioPrompt - fields = "__all__" - - -class 
ToolStudioIndexSerializer(serializers.Serializer): - file_name = serializers.CharField() - tool_id = serializers.CharField() - - -class ReorderPromptsSerializer(serializers.Serializer): - start_sequence_number = serializers.IntegerField(required=True) - end_sequence_number = serializers.IntegerField(required=True) - prompt_id = serializers.CharField(required=True) - - def validate(self, data): - start_sequence_number = data.get("start_sequence_number") - end_sequence_number = data.get("end_sequence_number") - - if start_sequence_number == end_sequence_number: - raise serializers.ValidationError( - "Start and end sequence numbers cannot be the same." - ) - - return data diff --git a/backend/prompt_studio/prompt_studio/urls.py b/backend/prompt_studio/prompt_studio/urls.py deleted file mode 100644 index 23e5f0243..000000000 --- a/backend/prompt_studio/prompt_studio/urls.py +++ /dev/null @@ -1,30 +0,0 @@ -from django.urls import path -from rest_framework.urlpatterns import format_suffix_patterns - -from .views import ToolStudioPromptView - -prompt_studio_prompt_detail = ToolStudioPromptView.as_view( - { - "get": "retrieve", - "put": "update", - "patch": "partial_update", - "delete": "destroy", - } -) - -reorder_prompts = ToolStudioPromptView.as_view({"post": "reorder_prompts"}) - -urlpatterns = format_suffix_patterns( - [ - path( - "prompt//", - prompt_studio_prompt_detail, - name="tool-studio-prompt-detail", - ), - path( - "prompt/reorder/", - reorder_prompts, - name="reorder_prompts", - ), - ] -) diff --git a/backend/prompt_studio/prompt_studio/views.py b/backend/prompt_studio/prompt_studio/views.py deleted file mode 100644 index a7d734f94..000000000 --- a/backend/prompt_studio/prompt_studio/views.py +++ /dev/null @@ -1,56 +0,0 @@ -from typing import Optional - -from django.db.models import QuerySet -from prompt_studio.permission import PromptAcesssToUser -from prompt_studio.prompt_studio.constants import ToolStudioPromptKeys -from prompt_studio.prompt_studio.controller import PromptStudioController -from prompt_studio.prompt_studio.models import ToolStudioPrompt -from prompt_studio.prompt_studio.serializers import ToolStudioPromptSerializer -from rest_framework import viewsets -from rest_framework.decorators import action -from rest_framework.request import Request -from rest_framework.response import Response -from rest_framework.versioning import URLPathVersioning -from utils.filtering import FilterHelper - - -class ToolStudioPromptView(viewsets.ModelViewSet): - """Viewset to handle all Tool Studio prompt related API logics. - - Args: - viewsets (_type_) - - Raises: - DuplicateData - FilenameMissingError - IndexingError - ValidationError - """ - - versioning_class = URLPathVersioning - serializer_class = ToolStudioPromptSerializer - permission_classes: list[type[PromptAcesssToUser]] = [PromptAcesssToUser] - - def get_queryset(self) -> Optional[QuerySet]: - filter_args = FilterHelper.build_filter_args( - self.request, - ToolStudioPromptKeys.TOOL_ID, - ) - if filter_args: - queryset = ToolStudioPrompt.objects.filter(**filter_args) - else: - queryset = ToolStudioPrompt.objects.all() - return queryset - - @action(detail=True, methods=["post"]) - def reorder_prompts(self, request: Request) -> Response: - """Reorder the sequence of prompts based on the provided data. - - Args: - request (Request): The HTTP request containing the reorder data. - - Returns: - Response: The HTTP response indicating the status of the reorder operation. 
- """ - prompt_studio_controller = PromptStudioController() - return prompt_studio_controller.reorder_prompts(request, ToolStudioPrompt) diff --git a/backend/prompt_studio/prompt_studio_core/__init__.py b/backend/prompt_studio/prompt_studio_core/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/backend/prompt_studio/prompt_studio_core/admin.py b/backend/prompt_studio/prompt_studio_core/admin.py deleted file mode 100644 index e6e1457ea..000000000 --- a/backend/prompt_studio/prompt_studio_core/admin.py +++ /dev/null @@ -1,5 +0,0 @@ -from django.contrib import admin - -from .models import CustomTool - -admin.site.register(CustomTool) diff --git a/backend/prompt_studio/prompt_studio_core/apps.py b/backend/prompt_studio/prompt_studio_core/apps.py deleted file mode 100644 index 15184a96e..000000000 --- a/backend/prompt_studio/prompt_studio_core/apps.py +++ /dev/null @@ -1,5 +0,0 @@ -from django.apps import AppConfig - - -class CustomTool(AppConfig): - name = "prompt_studio.prompt_studio_core" diff --git a/backend/prompt_studio/prompt_studio_core/constants.py b/backend/prompt_studio/prompt_studio_core/constants.py deleted file mode 100644 index 97141e9f0..000000000 --- a/backend/prompt_studio/prompt_studio_core/constants.py +++ /dev/null @@ -1,134 +0,0 @@ -from enum import Enum - - -class ToolStudioKeys: - CREATED_BY = "created_by" - TOOL_ID = "tool_id" - PROMPTS = "prompts" - PLATFORM_SERVICE_API_KEY = "PLATFORM_SERVICE_API_KEY" - SUMMARIZE_LLM_PROFILE = "summarize_llm_profile" - DEFAULT_PROFILE = "default_profile" - - -class ToolStudioErrors: - SERIALIZATION_FAILED = "Data Serialization Failed." - TOOL_NAME_EXISTS = "Tool with the name already exists" - DUPLICATE_API = "It appears that a duplicate call may have been made." - PLATFORM_ERROR = "Seems an error occured in Platform Service." 
- PROMPT_NAME_EXISTS = "Prompt with the name already exists" - - -class ToolStudioPromptKeys: - CREATED_BY = "created_by" - TOOL_ID = "tool_id" - RUN_ID = "run_id" - NUMBER = "Number" - FLOAT = "Float" - PG_VECTOR = "Postgres pg_vector" - ANSWERS = "answers" - UNIQUE_FILE_ID = "unique_file_id" - ID = "id" - FILE_NAME = "file_name" - FILE_HASH = "file_hash" - TOOL_ID = "tool_id" - NAME = "name" - ACTIVE = "active" - PROMPT = "prompt" - CHUNK_SIZE = "chunk-size" - PROMPTX = "promptx" - VECTOR_DB = "vector-db" - EMBEDDING = "embedding" - X2TEXT_ADAPTER = "x2text_adapter" - CHUNK_OVERLAP = "chunk-overlap" - LLM = "llm" - IS_ASSERT = "is_assert" - ASSERTION_FAILURE_PROMPT = "assertion_failure_prompt" - RETRIEVAL_STRATEGY = "retrieval-strategy" - SIMPLE = "simple" - TYPE = "type" - NUMBER = "number" - EMAIL = "email" - DATE = "date" - BOOLEAN = "boolean" - JSON = "json" - PREAMBLE = "preamble" - SIMILARITY_TOP_K = "similarity-top-k" - PROMPT_TOKENS = "prompt_tokens" - COMPLETION_TOKENS = "completion_tokens" - TOTAL_TOKENS = "total_tokens" - RESPONSE = "response" - POSTAMBLE = "postamble" - GRAMMAR = "grammar" - WORD = "word" - SYNONYMS = "synonyms" - OUTPUTS = "outputs" - SECTION = "section" - DEFAULT = "default" - REINDEX = "reindex" - EMBEDDING_SUFFIX = "embedding_suffix" - EVAL_METRIC_PREFIX = "eval_" - EVAL_RESULT_DELIM = "__" - EVAL_SETTINGS = "eval_settings" - EVAL_SETTINGS_EVALUATE = "evaluate" - EVAL_SETTINGS_MONITOR_LLM = "monitor_llm" - EVAL_SETTINGS_EXCLUDE_FAILED = "exclude_failed" - SUMMARIZE = "summarize" - SUMMARIZED_RESULT = "summarized_result" - DOCUMENT_ID = "document_id" - EXTRACT = "extract" - TOOL_SETTINGS = "tool_settings" - ENABLE_CHALLENGE = "enable_challenge" - CHALLENGE_LLM = "challenge_llm" - SINGLE_PASS_EXTRACTION_MODE = "single_pass_extraction_mode" - SINGLE_PASS_EXTRACTION = "single_pass_extraction" - NOTES = "NOTES" - OUTPUT = "output" - SEQUENCE_NUMBER = "sequence_number" - PROFILE_MANAGER_ID = "profile_manager" - CONTEXT = "context" - METADATA = "metadata" - INCLUDE_METADATA = "include_metadata" - TXT_EXTENTION = ".txt" - TABLE = "table" - EXTRACT = "extract" - PLATFORM_POSTAMBLE = "platform_postamble" - SUMMARIZE_AS_SOURCE = "summarize_as_source" - VARIABLE_MAP = "variable_map" - RECORD = "record" - ENABLE_HIGHLIGHT = "enable_highlight" - - -class FileViewTypes: - ORIGINAL = "ORIGINAL" - EXTRACT = "EXTRACT" - SUMMARIZE = "SUMMARIZE" - - -class LogLevels: - INFO = "INFO" - ERROR = "ERROR" - DEBUG = "DEBUG" - RUN = "RUN" - - -class IndexingStatus(Enum): - PENDING_STATUS = "pending" - COMPLETED_STATUS = "completed" - STARTED_STATUS = "started" - DOCUMENT_BEING_INDEXED = "Document is being indexed" - - -class DefaultPrompts: - PREAMBLE = ( - "Your ability to extract and summarize this context accurately " - "is essential for effective analysis. " - "Pay close attention to the context's language, structure, and any " - "cross-references to ensure a comprehensive and precise extraction " - "of information. Do not use prior knowledge or information from " - "outside the context to answer the questions. Only use the " - "information provided in the context to answer the questions." - ) - POSTAMBLE = ( - "Do not include any explanation in the reply. " - "Only include the extracted information in the reply." 
- ) diff --git a/backend/prompt_studio/prompt_studio_core/document_indexing_service.py b/backend/prompt_studio/prompt_studio_core/document_indexing_service.py deleted file mode 100644 index 539c5a2dc..000000000 --- a/backend/prompt_studio/prompt_studio_core/document_indexing_service.py +++ /dev/null @@ -1,53 +0,0 @@ -from typing import Optional - -from django.conf import settings -from prompt_studio.prompt_studio_core.constants import IndexingStatus -from utils.cache_service import CacheService - - -class DocumentIndexingService: - CACHE_PREFIX = "document_indexing:" - - @classmethod - def set_document_indexing(cls, org_id: str, user_id: str, doc_id_key: str) -> None: - CacheService.set_key( - cls._cache_key(org_id, user_id, doc_id_key), - IndexingStatus.STARTED_STATUS.value, - expire=settings.INDEXING_FLAG_TTL, - ) - - @classmethod - def is_document_indexing(cls, org_id: str, user_id: str, doc_id_key: str) -> bool: - return ( - CacheService.get_key(cls._cache_key(org_id, user_id, doc_id_key)) - == IndexingStatus.STARTED_STATUS.value - ) - - @classmethod - def mark_document_indexed( - cls, org_id: str, user_id: str, doc_id_key: str, doc_id: str - ) -> None: - CacheService.set_key( - cls._cache_key(org_id, user_id, doc_id_key), - doc_id, - expire=settings.INDEXING_FLAG_TTL, - ) - - @classmethod - def get_indexed_document_id( - cls, org_id: str, user_id: str, doc_id_key: str - ) -> Optional[str]: - result = CacheService.get_key(cls._cache_key(org_id, user_id, doc_id_key)) - if result and result != IndexingStatus.STARTED_STATUS.value: - return result - return None - - @classmethod - def remove_document_indexing( - cls, org_id: str, user_id: str, doc_id_key: str - ) -> None: - CacheService.delete_a_key(cls._cache_key(org_id, user_id, doc_id_key)) - - @classmethod - def _cache_key(cls, org_id: str, user_id: str, doc_id_key: str) -> str: - return f"{cls.CACHE_PREFIX}{org_id}:{user_id}:{doc_id_key}" diff --git a/backend/prompt_studio/prompt_studio_core/exceptions.py b/backend/prompt_studio/prompt_studio_core/exceptions.py deleted file mode 100644 index a3f337b6c..000000000 --- a/backend/prompt_studio/prompt_studio_core/exceptions.py +++ /dev/null @@ -1,97 +0,0 @@ -from typing import Optional - -from prompt_studio.prompt_profile_manager.constants import ProfileManagerKeys -from prompt_studio.prompt_studio_core.constants import ToolStudioErrors -from rest_framework.exceptions import APIException - - -class PlatformServiceError(APIException): - status_code = 400 - default_detail = ToolStudioErrors.PLATFORM_ERROR - - -class ToolNotValid(APIException): - status_code = 400 - default_detail = "Custom tool is not valid." - - -class IndexingAPIError(APIException): - status_code = 500 - default_detail = "Error while indexing file" - - -class AnswerFetchError(APIException): - status_code = 500 - default_detail = "Error occured while fetching response for the prompt" - - -class DefaultProfileError(APIException): - status_code = 500 - default_detail = ( - "Default LLM profile is not configured." - "Please set an LLM profile as default to continue." - ) - - -class EnvRequired(APIException): - status_code = 404 - default_detail = "Environment variable not set" - - -class OutputSaveError(APIException): - status_code = 500 - default_detail = "Unable to store the output." 
- - -class ToolDeleteError(APIException): - status_code = 500 - default_detail = "Failed to delete the error" - - -class NoPromptsFound(APIException): - status_code = 404 - default_detail = "No prompts available to process" - - -class PermissionError(APIException): - status_code = 403 - default_detail = "You do not have permission to perform this action." - - -class EmptyPromptError(APIException): - status_code = 422 - default_detail = "Prompt(s) cannot be empty" - - -class MaxProfilesReachedError(APIException): - status_code = 403 - default_detail = ( - f"Maximum number of profiles (max {ProfileManagerKeys.MAX_PROFILE_COUNT})" - " per prompt studio project has been reached." - ) - - -class OperationNotSupported(APIException): - status_code = 403 - default_detail = ( - "This feature is not supported " - "in the open-source version. " - "Please check our cloud or enterprise on-premise offering " - "for access to this functionality." - ) - - -class PromptNotRun(APIException): - status_code = 403 - default_detail = ( - "The prompt must be executed before " - "it can be used as a variable in another prompt. " - "Please execute the prompt first and try again." - ) - - def __init__(self, detail: Optional[str] = None, code: Optional[int] = None): - if detail is not None: - self.detail = detail - if code is not None: - self.code = code - super().__init__(detail, code) diff --git a/backend/prompt_studio/prompt_studio_core/migrations/0001_initial.py b/backend/prompt_studio/prompt_studio_core/migrations/0001_initial.py deleted file mode 100644 index 14ae4c61a..000000000 --- a/backend/prompt_studio/prompt_studio_core/migrations/0001_initial.py +++ /dev/null @@ -1,122 +0,0 @@ -# Generated by Django 4.2.1 on 2024-01-20 08:04 - -import uuid - -import django.db.models.deletion -from django.conf import settings -from django.db import migrations, models - - -class Migration(migrations.Migration): - initial = True - - dependencies = [ - migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ("prompt_profile_manager", "0001_initial"), - ] - - operations = [ - migrations.CreateModel( - name="CustomTool", - fields=[ - ("created_at", models.DateTimeField(auto_now_add=True)), - ("updated_at", models.DateTimeField(auto_now=True)), - ( - "tool_id", - models.UUIDField( - default=uuid.uuid4, - editable=False, - primary_key=True, - serialize=False, - ), - ), - ("tool_name", models.TextField(unique=True)), - ("description", models.TextField()), - ( - "author", - models.TextField( - db_comment="Specific to the user who created the tool." 
- ), - ), - ( - "icon", - models.TextField( - blank=True, - db_comment="Field to store icon url for the custom tool.", - ), - ), - ( - "output", - models.TextField( - blank=True, - choices=[ - ("JSON", "Output stored as JSON"), - ("YAML", "Output stored as YAML"), - ], - db_comment="Field to store the output format type.", - ), - ), - ( - "log_id", - models.UUIDField( - db_comment="Field to store unique log_id for polling", - default=uuid.uuid4, - ), - ), - ( - "preamble", - models.TextField(blank=True, db_comment="Preamble to the prompts"), - ), - ( - "postamble", - models.TextField( - blank=True, - db_comment="Appended as postable to prompts.", - ), - ), - ( - "prompt_grammer", - models.JSONField( - blank=True, - db_comment="Synonymous words used in prompt", - null=True, - ), - ), - ( - "created_by", - models.ForeignKey( - blank=True, - editable=False, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - related_name="tool_created_by", - to=settings.AUTH_USER_MODEL, - ), - ), - ( - "default_profile", - models.ForeignKey( - blank=True, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - related_name="default_profile", - to="prompt_profile_manager.profilemanager", - ), - ), - ( - "modified_by", - models.ForeignKey( - blank=True, - editable=False, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - related_name="tool_modified_by", - to=settings.AUTH_USER_MODEL, - ), - ), - ], - options={ - "abstract": False, - }, - ), - ] diff --git a/backend/prompt_studio/prompt_studio_core/migrations/0002_alter_customtool_output.py b/backend/prompt_studio/prompt_studio_core/migrations/0002_alter_customtool_output.py deleted file mode 100644 index 1d90060e2..000000000 --- a/backend/prompt_studio/prompt_studio_core/migrations/0002_alter_customtool_output.py +++ /dev/null @@ -1,19 +0,0 @@ -# Generated by Django 4.2.1 on 2024-01-25 07:22 - -from django.db import migrations, models - - -class Migration(migrations.Migration): - dependencies = [ - ("prompt_studio_core", "0001_initial"), - ] - - operations = [ - migrations.AlterField( - model_name="customtool", - name="output", - field=models.TextField( - blank=True, db_comment="Field to store the output format type." 
- ), - ), - ] diff --git a/backend/prompt_studio/prompt_studio_core/migrations/0002_remove_customtool_updated_at_customtool_modified_at.py b/backend/prompt_studio/prompt_studio_core/migrations/0002_remove_customtool_updated_at_customtool_modified_at.py deleted file mode 100644 index 29393e4ca..000000000 --- a/backend/prompt_studio/prompt_studio_core/migrations/0002_remove_customtool_updated_at_customtool_modified_at.py +++ /dev/null @@ -1,21 +0,0 @@ -# Generated by Django 4.2.1 on 2024-01-23 19:02 - -from django.db import migrations, models - - -class Migration(migrations.Migration): - dependencies = [ - ("prompt_studio_core", "0001_initial"), - ] - - operations = [ - migrations.RemoveField( - model_name="customtool", - name="updated_at", - ), - migrations.AddField( - model_name="customtool", - name="modified_at", - field=models.DateTimeField(auto_now=True), - ), - ] diff --git a/backend/prompt_studio/prompt_studio_core/migrations/0003_merge_20240125_1501.py b/backend/prompt_studio/prompt_studio_core/migrations/0003_merge_20240125_1501.py deleted file mode 100644 index da66a19e0..000000000 --- a/backend/prompt_studio/prompt_studio_core/migrations/0003_merge_20240125_1501.py +++ /dev/null @@ -1,15 +0,0 @@ -# Generated by Django 4.2.1 on 2024-01-25 15:01 - -from django.db import migrations - - -class Migration(migrations.Migration): - dependencies = [ - ("prompt_studio_core", "0002_alter_customtool_output"), - ( - "prompt_studio_core", - "0002_remove_customtool_updated_at_customtool_modified_at", - ), - ] - - operations = [] diff --git a/backend/prompt_studio/prompt_studio_core/migrations/0004_customtool_summarize_as_source_and_more.py b/backend/prompt_studio/prompt_studio_core/migrations/0004_customtool_summarize_as_source_and_more.py deleted file mode 100644 index 125ca3b2b..000000000 --- a/backend/prompt_studio/prompt_studio_core/migrations/0004_customtool_summarize_as_source_and_more.py +++ /dev/null @@ -1,42 +0,0 @@ -# Generated by Django 4.2.1 on 2024-02-27 05:43 - -import django.db.models.deletion -from django.db import migrations, models - - -class Migration(migrations.Migration): - dependencies = [ - ("prompt_profile_manager", "0006_alter_profilemanager_x2text"), - ("prompt_studio_core", "0003_merge_20240125_1501"), - ] - - operations = [ - migrations.AddField( - model_name="customtool", - name="summarize_as_source", - field=models.BooleanField(default=True), - ), - migrations.AddField( - model_name="customtool", - name="summarize_context", - field=models.BooleanField(default=True), - ), - migrations.AddField( - model_name="customtool", - name="summarize_llm_profile", - field=models.ForeignKey( - blank=True, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - related_name="summarize_llm_profile", - to="prompt_profile_manager.profilemanager", - ), - ), - migrations.AddField( - model_name="customtool", - name="summarize_prompt", - field=models.TextField( - blank=True, db_comment="Field to store the summarize prompt" - ), - ), - ] diff --git a/backend/prompt_studio/prompt_studio_core/migrations/0005_alter_customtool_default_profile_and_more.py b/backend/prompt_studio/prompt_studio_core/migrations/0005_alter_customtool_default_profile_and_more.py deleted file mode 100644 index f8c53210a..000000000 --- a/backend/prompt_studio/prompt_studio_core/migrations/0005_alter_customtool_default_profile_and_more.py +++ /dev/null @@ -1,53 +0,0 @@ -# Generated by Django 4.2.1 on 2024-02-28 09:03 - -import django.db.models.deletion -from django.db import migrations, models - - -class 
Migration(migrations.Migration): - dependencies = [ - ("prompt_profile_manager", "0006_alter_profilemanager_x2text"), - ("prompt_studio_core", "0004_customtool_summarize_as_source_and_more"), - ] - - operations = [ - migrations.AlterField( - model_name="customtool", - name="default_profile", - field=models.ForeignKey( - blank=True, - db_comment="Default LLM Profile used in prompt", - null=True, - on_delete=django.db.models.deletion.SET_NULL, - related_name="default_profile", - to="prompt_profile_manager.profilemanager", - ), - ), - migrations.AlterField( - model_name="customtool", - name="summarize_as_source", - field=models.BooleanField( - db_comment="Flag to use summarized content as source", - default=True, - ), - ), - migrations.AlterField( - model_name="customtool", - name="summarize_context", - field=models.BooleanField( - db_comment="Flag to summarize content", default=True - ), - ), - migrations.AlterField( - model_name="customtool", - name="summarize_llm_profile", - field=models.ForeignKey( - blank=True, - db_comment="LLM Profile used for summarize", - null=True, - on_delete=django.db.models.deletion.SET_NULL, - related_name="summarize_llm_profile", - to="prompt_profile_manager.profilemanager", - ), - ), - ] diff --git a/backend/prompt_studio/prompt_studio_core/migrations/0006_alter_customtool_summarize_as_source_and_more.py b/backend/prompt_studio/prompt_studio_core/migrations/0006_alter_customtool_summarize_as_source_and_more.py deleted file mode 100644 index 27f529486..000000000 --- a/backend/prompt_studio/prompt_studio_core/migrations/0006_alter_customtool_summarize_as_source_and_more.py +++ /dev/null @@ -1,30 +0,0 @@ -# Generated by Django 4.2.1 on 2024-03-02 07:30 - -from django.db import migrations, models - - -class Migration(migrations.Migration): - dependencies = [ - ( - "prompt_studio_core", - "0005_alter_customtool_default_profile_and_more", - ), - ] - - operations = [ - migrations.AlterField( - model_name="customtool", - name="summarize_as_source", - field=models.BooleanField( - db_comment="Flag to use summarized content as source", - default=False, - ), - ), - migrations.AlterField( - model_name="customtool", - name="summarize_context", - field=models.BooleanField( - db_comment="Flag to summarize content", default=False - ), - ), - ] diff --git a/backend/prompt_studio/prompt_studio_core/migrations/0007_remove_customtool_default_profile_and_more.py b/backend/prompt_studio/prompt_studio_core/migrations/0007_remove_customtool_default_profile_and_more.py deleted file mode 100644 index 93157a8a1..000000000 --- a/backend/prompt_studio/prompt_studio_core/migrations/0007_remove_customtool_default_profile_and_more.py +++ /dev/null @@ -1,23 +0,0 @@ -# Generated by Django 4.2.1 on 2024-03-07 06:25 - -from django.db import migrations - - -class Migration(migrations.Migration): - dependencies = [ - ( - "prompt_studio_core", - "0006_alter_customtool_summarize_as_source_and_more", - ), - ] - - operations = [ - migrations.RemoveField( - model_name="customtool", - name="default_profile", - ), - migrations.RemoveField( - model_name="customtool", - name="summarize_llm_profile", - ), - ] diff --git a/backend/prompt_studio/prompt_studio_core/migrations/0008_customtool_exclude_failed_customtool_monitor_llm.py b/backend/prompt_studio/prompt_studio_core/migrations/0008_customtool_exclude_failed_customtool_monitor_llm.py deleted file mode 100644 index 3173d5604..000000000 --- a/backend/prompt_studio/prompt_studio_core/migrations/0008_customtool_exclude_failed_customtool_monitor_llm.py +++ 
/dev/null @@ -1,33 +0,0 @@ -# Generated by Django 4.2.1 on 2024-03-13 23:54 - -import django.db.models.deletion -from django.db import migrations, models - - -class Migration(migrations.Migration): - dependencies = [ - ("adapter_processor", "0005_alter_adapterinstance_adapter_type"), - ( - "prompt_studio_core", - "0007_remove_customtool_default_profile_and_more", - ), - ] - - operations = [ - migrations.AddField( - model_name="customtool", - name="exclude_failed", - field=models.BooleanField(default=True), - ), - migrations.AddField( - model_name="customtool", - name="monitor_llm", - field=models.ForeignKey( - blank=True, - db_comment="Field to store monitor llm", - null=True, - on_delete=django.db.models.deletion.PROTECT, - to="adapter_processor.adapterinstance", - ), - ), - ] diff --git a/backend/prompt_studio/prompt_studio_core/migrations/0009_customtool_single_pass_extraction_mode_and_more.py b/backend/prompt_studio/prompt_studio_core/migrations/0009_customtool_single_pass_extraction_mode_and_more.py deleted file mode 100644 index 040497c0f..000000000 --- a/backend/prompt_studio/prompt_studio_core/migrations/0009_customtool_single_pass_extraction_mode_and_more.py +++ /dev/null @@ -1,31 +0,0 @@ -# Generated by Django 4.2.1 on 2024-03-18 09:57 - -from django.db import migrations, models - - -class Migration(migrations.Migration): - dependencies = [ - ( - "prompt_studio_core", - "0008_customtool_exclude_failed_customtool_monitor_llm", - ), - ] - - operations = [ - migrations.AddField( - model_name="customtool", - name="single_pass_extraction_mode", - field=models.BooleanField( - db_comment="Flag to enable or disable single pass extraction mode", - default=False, - ), - ), - migrations.AlterField( - model_name="customtool", - name="exclude_failed", - field=models.BooleanField( - db_comment="Flag to make the answer null if it is incorrect", - default=True, - ), - ), - ] diff --git a/backend/prompt_studio/prompt_studio_core/migrations/0010_customtool_challenge_llm_customtool_enable_challenge_and_more.py b/backend/prompt_studio/prompt_studio_core/migrations/0010_customtool_challenge_llm_customtool_enable_challenge_and_more.py deleted file mode 100644 index 8b879c16e..000000000 --- a/backend/prompt_studio/prompt_studio_core/migrations/0010_customtool_challenge_llm_customtool_enable_challenge_and_more.py +++ /dev/null @@ -1,59 +0,0 @@ -# Generated by Django 4.2.1 on 2024-03-24 00:31 - -import django.db.models.deletion -from django.db import migrations, models - - -class Migration(migrations.Migration): - dependencies = [ - ( - "adapter_processor", - "0007_remove_adapterinstance_is_default_userdefaultadapter", - ), - ( - "prompt_studio_core", - "0009_customtool_single_pass_extraction_mode_and_more", - ), - ] - - operations = [ - migrations.AddField( - model_name="customtool", - name="challenge_llm", - field=models.ForeignKey( - blank=True, - db_comment="Field to store challenge llm", - null=True, - on_delete=django.db.models.deletion.PROTECT, - related_name="challenge_customtools", - to="adapter_processor.adapterinstance", - ), - ), - migrations.AddField( - model_name="customtool", - name="enable_challenge", - field=models.BooleanField( - db_comment="Flag to enable or disable challenge", default=False - ), - ), - migrations.AlterField( - model_name="customtool", - name="monitor_llm", - field=models.ForeignKey( - blank=True, - db_comment="Field to store monitor llm", - null=True, - on_delete=django.db.models.deletion.PROTECT, - related_name="monitor_customtools", - 
to="adapter_processor.adapterinstance", - ), - ), - migrations.AlterField( - model_name="customtool", - name="single_pass_extraction_mode", - field=models.BooleanField( - db_comment="Flag to enable or disable single pass extraction mode", - default=False, - ), - ), - ] diff --git a/backend/prompt_studio/prompt_studio_core/migrations/0011_alter_customtool_postamble_alter_customtool_preamble.py b/backend/prompt_studio/prompt_studio_core/migrations/0011_alter_customtool_postamble_alter_customtool_preamble.py deleted file mode 100644 index f7772a9b9..000000000 --- a/backend/prompt_studio/prompt_studio_core/migrations/0011_alter_customtool_postamble_alter_customtool_preamble.py +++ /dev/null @@ -1,33 +0,0 @@ -# Generated by Django 4.2.1 on 2024-03-24 10:56 - -from django.db import migrations, models - - -class Migration(migrations.Migration): - dependencies = [ - ( - "prompt_studio_core", - "0010_customtool_challenge_llm_customtool_enable_challenge_and_more", - ), - ] - - operations = [ - migrations.AlterField( - model_name="customtool", - name="postamble", - field=models.TextField( - blank=True, - db_comment="Appended as postable to prompts.", - default="Do not include any explanation in the reply. Only include the extracted information in the reply.", - ), - ), - migrations.AlterField( - model_name="customtool", - name="preamble", - field=models.TextField( - blank=True, - db_comment="Preamble to the prompts", - default="Your ability to extract and summarize this context accurately is essential for effective analysis. Pay close attention to the context's language, structure, and any cross-references to ensure a comprehensive and precise extraction of information. Do not use prior knowledge or information from outside the context to answer the questions. 
Only use the information provided in the context to answer the questions.", - ), - ), - ] diff --git a/backend/prompt_studio/prompt_studio_core/migrations/0012_customtool_shared_users.py b/backend/prompt_studio/prompt_studio_core/migrations/0012_customtool_shared_users.py deleted file mode 100644 index fa7a16bbe..000000000 --- a/backend/prompt_studio/prompt_studio_core/migrations/0012_customtool_shared_users.py +++ /dev/null @@ -1,24 +0,0 @@ -# Generated by Django 4.2.1 on 2024-03-26 04:26 - -from django.conf import settings -from django.db import migrations, models - - -class Migration(migrations.Migration): - dependencies = [ - migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ( - "prompt_studio_core", - "0011_alter_customtool_postamble_alter_customtool_preamble", - ), - ] - - operations = [ - migrations.AddField( - model_name="customtool", - name="shared_users", - field=models.ManyToManyField( - related_name="shared_custom_tool", to=settings.AUTH_USER_MODEL - ), - ), - ] diff --git a/backend/prompt_studio/prompt_studio_core/migrations/0013_customtool_enable_highlight.py b/backend/prompt_studio/prompt_studio_core/migrations/0013_customtool_enable_highlight.py deleted file mode 100644 index eb7ee01f7..000000000 --- a/backend/prompt_studio/prompt_studio_core/migrations/0013_customtool_enable_highlight.py +++ /dev/null @@ -1,21 +0,0 @@ -# Generated by Django 4.2.1 on 2024-06-13 06:45 - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("prompt_studio_core", "0012_customtool_shared_users"), - ] - - operations = [ - migrations.AddField( - model_name="customtool", - name="enable_highlight", - field=models.BooleanField( - db_comment="Flag to enable or disable document highlighting", - default=False, - ), - ), - ] diff --git a/backend/prompt_studio/prompt_studio_core/migrations/__init__.py b/backend/prompt_studio/prompt_studio_core/migrations/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/backend/prompt_studio/prompt_studio_core/models.py b/backend/prompt_studio/prompt_studio_core/models.py deleted file mode 100644 index 5075caceb..000000000 --- a/backend/prompt_studio/prompt_studio_core/models.py +++ /dev/null @@ -1,149 +0,0 @@ -import logging -import shutil -import uuid -from typing import Any - -from account.models import User -from adapter_processor.models import AdapterInstance -from django.db import models -from django.db.models import QuerySet -from file_management.file_management_helper import FileManagerHelper -from prompt_studio.prompt_studio_core.constants import DefaultPrompts -from utils.models.base_model import BaseModel - -logger = logging.getLogger(__name__) - - -class CustomToolModelManager(models.Manager): - def get_queryset(self) -> QuerySet[Any]: - return super().get_queryset() - - def for_user(self, user: User) -> QuerySet[Any]: - return ( - self.get_queryset() - .filter(models.Q(created_by=user) | models.Q(shared_users=user)) - .distinct("tool_id") - ) - - -class CustomTool(BaseModel): - """Model to store the custom tools designed in the tool studio.""" - - tool_id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False) - tool_name = models.TextField(unique=True, blank=False, null=False) - description = models.TextField(blank=False, null=False) - author = models.TextField( - blank=False, - null=False, - db_comment="Specific to the user who created the tool.", - ) - icon = models.TextField( - blank=True, - db_comment="Field to store \ - icon url for the custom 
tool.", - ) - output = models.TextField( - db_comment="Field to store the output format type.", - blank=True, - ) - log_id = models.UUIDField( - default=uuid.uuid4, - db_comment="Field to store unique log_id for polling", - ) - - summarize_context = models.BooleanField( - default=False, db_comment="Flag to summarize content" - ) - summarize_as_source = models.BooleanField( - default=False, db_comment="Flag to use summarized content as source" - ) - summarize_prompt = models.TextField( - blank=True, - db_comment="Field to store the summarize prompt", - unique=False, - ) - preamble = models.TextField( - blank=True, - db_comment="Preamble to the prompts", - default=DefaultPrompts.PREAMBLE, - ) - postamble = models.TextField( - blank=True, - db_comment="Appended as postable to prompts.", - default=DefaultPrompts.POSTAMBLE, - ) - prompt_grammer = models.JSONField( - null=True, blank=True, db_comment="Synonymous words used in prompt" - ) - monitor_llm = models.ForeignKey( - AdapterInstance, - on_delete=models.PROTECT, - db_comment="Field to store monitor llm", - null=True, - blank=True, - related_name="monitor_customtools", - ) - created_by = models.ForeignKey( - User, - on_delete=models.SET_NULL, - related_name="tool_created_by", - null=True, - blank=True, - editable=False, - ) - modified_by = models.ForeignKey( - User, - on_delete=models.SET_NULL, - related_name="tool_modified_by", - null=True, - blank=True, - editable=False, - ) - - exclude_failed = models.BooleanField( - db_comment="Flag to make the answer null if it is incorrect", - default=True, - ) - single_pass_extraction_mode = models.BooleanField( - db_comment="Flag to enable or disable single pass extraction mode", - default=False, - ) - challenge_llm = models.ForeignKey( - AdapterInstance, - on_delete=models.PROTECT, - db_comment="Field to store challenge llm", - null=True, - blank=True, - related_name="challenge_customtools", - ) - enable_challenge = models.BooleanField( - db_comment="Flag to enable or disable challenge", default=False - ) - - enable_highlight = models.BooleanField( - db_comment="Flag to enable or disable document highlighting", default=False - ) - - # Introduced field to establish M2M relation between users and custom_tool. - # This will introduce intermediary table which relates both the models. 
- shared_users = models.ManyToManyField(User, related_name="shared_custom_tool") - - objects = CustomToolModelManager() - - def delete(self, organization_id=None, *args, **kwargs): - # Delete the documents associated with the tool - file_path = FileManagerHelper.handle_sub_directory_for_tenants( - organization_id, - is_create=False, - user_id=self.created_by.user_id, - tool_id=str(self.tool_id), - ) - if organization_id: - try: - shutil.rmtree(file_path) - except FileNotFoundError: - logger.error(f"The folder {file_path} does not exist.") - except OSError as e: - logger.error(f"Error: {file_path} : {e.strerror}") - # Continue with the deletion of the tool - super().delete(*args, **kwargs) diff --git a/backend/prompt_studio/prompt_studio_core/prompt_ide_base_tool.py b/backend/prompt_studio/prompt_studio_core/prompt_ide_base_tool.py deleted file mode 100644 index d26252bfb..000000000 --- a/backend/prompt_studio/prompt_studio_core/prompt_ide_base_tool.py +++ /dev/null @@ -1,43 +0,0 @@ -import os - -from platform_settings.platform_auth_service import PlatformAuthenticationService -from prompt_studio.prompt_studio_core.constants import ToolStudioKeys -from unstract.sdk.constants import LogLevel -from unstract.sdk.tool.stream import StreamMixin - - -class PromptIdeBaseTool(StreamMixin): - def __init__(self, log_level: LogLevel = LogLevel.INFO, org_id: str = "") -> None: - """ - Args: - tool (UnstractAbstractTool): Instance of UnstractAbstractTool - Notes: - - PLATFORM_SERVICE_API_KEY environment variable is required. - """ - self.log_level = log_level - self.org_id = org_id - super().__init__(log_level=log_level) - - def get_env_or_die(self, env_key: str) -> str: - """Returns the value of an env variable. - - If its empty or None, raises an error and exits - - Args: - env_key (str): Key to retrieve - - Returns: - str: Value of the env - """ - # HACK: Adding platform key for multitenancy - if env_key == ToolStudioKeys.PLATFORM_SERVICE_API_KEY: - platform_key = PlatformAuthenticationService.get_active_platform_key( - self.org_id - ) - key: str = str(platform_key.key) - return key - else: - env_value = os.environ.get(env_key) - if env_value is None or env_value == "": - self.stream_error_and_exit(f"Env variable {env_key} is required") - return env_value # type:ignore diff --git a/backend/prompt_studio/prompt_studio_core/prompt_studio_helper.py b/backend/prompt_studio/prompt_studio_core/prompt_studio_helper.py deleted file mode 100644 index 34a646ddc..000000000 --- a/backend/prompt_studio/prompt_studio_core/prompt_studio_helper.py +++ /dev/null @@ -1,1119 +0,0 @@ -import json -import logging -import os -import time -import uuid -from pathlib import Path -from typing import Any, Callable, Optional - -from account.constants import Common -from account.models import User -from adapter_processor.constants import AdapterKeys -from adapter_processor.models import AdapterInstance -from django.conf import settings -from django.db.models.manager import BaseManager -from file_management.file_management_helper import FileManagerHelper -from prompt_studio.modifier_loader import ModifierConfig -from prompt_studio.modifier_loader import load_plugins as load_modifier_plugins -from prompt_studio.prompt_profile_manager.models import ProfileManager -from prompt_studio.prompt_profile_manager.profile_manager_helper import ( - ProfileManagerHelper, -) -from prompt_studio.prompt_studio.models import ToolStudioPrompt -from prompt_studio.prompt_studio_core.constants import IndexingStatus, LogLevels -from 
prompt_studio.prompt_studio_core.constants import ToolStudioPromptKeys as TSPKeys -from prompt_studio.prompt_studio_core.document_indexing_service import ( - DocumentIndexingService, -) -from prompt_studio.prompt_studio_core.exceptions import ( - AnswerFetchError, - DefaultProfileError, - EmptyPromptError, - IndexingAPIError, - NoPromptsFound, - OperationNotSupported, - PermissionError, -) -from prompt_studio.prompt_studio_core.models import CustomTool -from prompt_studio.prompt_studio_core.prompt_ide_base_tool import PromptIdeBaseTool -from prompt_studio.prompt_studio_core.prompt_variable_service import ( - PromptStudioVariableService, -) -from prompt_studio.prompt_studio_document_manager.models import DocumentManager -from prompt_studio.prompt_studio_index_manager.prompt_studio_index_helper import ( # noqa: E501 - PromptStudioIndexHelper, -) -from prompt_studio.prompt_studio_output_manager.output_manager_helper import ( - OutputManagerHelper, -) -from unstract.sdk.constants import LogLevel -from unstract.sdk.exceptions import IndexingError, SdkError -from unstract.sdk.index import Index -from unstract.sdk.prompt import PromptTool -from unstract.sdk.utils.tool_utils import ToolUtils -from utils.local_context import StateStore - -from unstract.core.pubsub_helper import LogPublisher - -CHOICES_JSON = "/static/select_choices.json" -ERROR_MSG = "User %s doesn't have access to adapter %s" - -logger = logging.getLogger(__name__) - -modifier_plugins = load_modifier_plugins() - - -class PromptStudioHelper: - """Helper class for Custom tool operations.""" - - @staticmethod - def create_default_profile_manager(user: User, tool_id: uuid) -> None: - """Create a default profile manager for a given user and tool. - - Args: - user (User): The user for whom the default profile manager is - created. - tool_id (uuid): The ID of the tool for which the default profile - manager is created. - - Raises: - AdapterInstance.DoesNotExist: If no suitable adapter instance is - found for creating the default profile manager. - - Returns: - None - """ - try: - AdapterInstance.objects.get( - is_friction_less=True, - is_usable=True, - adapter_type=AdapterKeys.LLM, - ) - - default_adapters: BaseManager[AdapterInstance] = ( - AdapterInstance.objects.filter(is_friction_less=True) - ) - - profile_manager = ProfileManager( - prompt_studio_tool=CustomTool.objects.get(pk=tool_id), - is_default=True, - created_by=user, - modified_by=user, - chunk_size=0, - profile_name="sample profile", - chunk_overlap=0, - section="Default", - retrieval_strategy="simple", - similarity_top_k=3, - ) - - for adapter in default_adapters: - if adapter.adapter_type == AdapterKeys.LLM: - profile_manager.llm = adapter - elif adapter.adapter_type == AdapterKeys.VECTOR_DB: - profile_manager.vector_store = adapter - elif adapter.adapter_type == AdapterKeys.X2TEXT: - profile_manager.x2text = adapter - elif adapter.adapter_type == AdapterKeys.EMBEDDING: - profile_manager.embedding_model = adapter - - profile_manager.save() - - except AdapterInstance.DoesNotExist: - logger.info("skipping default profile creation") - - @staticmethod - def validate_adapter_status( - profile_manager: ProfileManager, - ) -> None: - """Helper method to validate the status of adapters in profile manager. - - Args: - profile_manager (ProfileManager): The profile manager instance to - validate. - - Raises: - PermissionError: If the owner does not have permission to perform - the action. 
- """ - - error_msg = "Permission Error: Free usage for the configured trial adapter exhausted.Please connect your own service accounts to continue.Please see our documentation for more details:https://docs.unstract.com/unstract_platform/setup_accounts/whats_needed" # noqa: E501 - adapters = [ - profile_manager.llm, - profile_manager.vector_store, - profile_manager.embedding_model, - profile_manager.x2text, - ] - - for adapter in adapters: - if not adapter.is_usable: - raise PermissionError(error_msg) - - @staticmethod - def validate_profile_manager_owner_access( - profile_manager: ProfileManager, - ) -> None: - """Helper method to validate the owner's access to the profile manager. - - Args: - profile_manager (ProfileManager): The profile manager instance to - validate. - - Raises: - PermissionError: If the owner does not have permission to perform - the action. - """ - profile_manager_owner = profile_manager.created_by - - is_llm_owned = ( - profile_manager.llm.shared_to_org - or profile_manager.llm.created_by == profile_manager_owner - or profile_manager.llm.shared_users.filter( - pk=profile_manager_owner.pk - ).exists() - ) - is_vector_store_owned = ( - profile_manager.vector_store.shared_to_org - or profile_manager.vector_store.created_by == profile_manager_owner - or profile_manager.vector_store.shared_users.filter( - pk=profile_manager_owner.pk - ).exists() - ) - is_embedding_model_owned = ( - profile_manager.embedding_model.shared_to_org - or profile_manager.embedding_model.created_by == profile_manager_owner - or profile_manager.embedding_model.shared_users.filter( - pk=profile_manager_owner.pk - ).exists() - ) - is_x2text_owned = ( - profile_manager.x2text.shared_to_org - or profile_manager.x2text.created_by == profile_manager_owner - or profile_manager.x2text.shared_users.filter( - pk=profile_manager_owner.pk - ).exists() - ) - - if not ( - is_llm_owned - and is_vector_store_owned - and is_embedding_model_owned - and is_x2text_owned - ): - adapter_names = set() - if not is_llm_owned: - logger.error( - ERROR_MSG, - profile_manager_owner.user_id, - profile_manager.llm.id, - ) - adapter_names.add(profile_manager.llm.adapter_name) - if not is_vector_store_owned: - logger.error( - ERROR_MSG, - profile_manager_owner.user_id, - profile_manager.vector_store.id, - ) - adapter_names.add(profile_manager.vector_store.adapter_name) - if not is_embedding_model_owned: - logger.error( - ERROR_MSG, - profile_manager_owner.user_id, - profile_manager.embedding_model.id, - ) - adapter_names.add(profile_manager.embedding_model.adapter_name) - if not is_x2text_owned: - logger.error( - ERROR_MSG, - profile_manager_owner.user_id, - profile_manager.x2text.id, - ) - adapter_names.add(profile_manager.x2text.adapter_name) - if len(adapter_names) > 1: - error_msg = ( - f"Multiple permission errors were encountered with {', '.join(adapter_names)}", # noqa: E501 - ) - else: - error_msg = ( - f"Permission Error: You do not have access to {adapter_names.pop()}", # noqa: E501 - ) - - raise PermissionError(error_msg) - - @staticmethod - def _publish_log( - component: dict[str, str], level: str, state: str, message: str - ) -> None: - LogPublisher.publish( - StateStore.get(Common.LOG_EVENTS_ID), - LogPublisher.log_prompt(component, level, state, message), - ) - - @staticmethod - def get_select_fields() -> dict[str, Any]: - """Method to fetch dropdown field values for frontend. 
- - Returns: - dict[str, Any]: Dict for dropdown data - """ - f = open(f"{os.path.dirname(__file__)}{CHOICES_JSON}") - choices = f.read() - f.close() - response: dict[str, Any] = json.loads(choices) - - for modifier_plugin in modifier_plugins: - cls = modifier_plugin[ModifierConfig.METADATA][ - ModifierConfig.METADATA_SERVICE_CLASS - ] - response = cls.update_select_choices(default_choices=response) - - return response - - @staticmethod - def _fetch_prompt_from_id(id: str) -> ToolStudioPrompt: - """Internal function used to fetch prompt from ID. - - Args: - id (_type_): UUID of the prompt - - Returns: - ToolStudioPrompt: Instance of the model - """ - prompt_instance: ToolStudioPrompt = ToolStudioPrompt.objects.get(pk=id) - return prompt_instance - - @staticmethod - def fetch_prompt_from_tool(tool_id: str) -> list[ToolStudioPrompt]: - """Internal function used to fetch mapped prompts from ToolID. - - Args: - tool_id (_type_): UUID of the tool - - Returns: - List[ToolStudioPrompt]: List of instance of the model - """ - prompt_instances: list[ToolStudioPrompt] = ToolStudioPrompt.objects.filter( - tool_id=tool_id - ).order_by(TSPKeys.SEQUENCE_NUMBER) - return prompt_instances - - @staticmethod - def index_document( - tool_id: str, - file_name: str, - org_id: str, - user_id: str, - document_id: str, - is_summary: bool = False, - run_id: str = None, - text_processor: Optional[type[Any]] = None, - ) -> Any: - """Method to index a document. - - Args: - tool_id (str): Id of the tool - file_name (str): File to parse - org_id (str): The ID of the organization to which the user belongs. - user_id (str): The ID of the user who uploaded the document. - is_summary (bool, optional): Whether the document is a summary - or not. Defaults to False. - - Raises: - ToolNotValid - IndexingError - """ - tool: CustomTool = CustomTool.objects.get(pk=tool_id) - if is_summary: - profile_manager: ProfileManager = ProfileManager.objects.get( - prompt_studio_tool=tool, is_summarize_llm=True - ) - default_profile = profile_manager - file_path = file_name - else: - default_profile = ProfileManager.get_default_llm_profile(tool) - file_path = FileManagerHelper.handle_sub_directory_for_tenants( - org_id, - is_create=False, - user_id=user_id, - tool_id=tool_id, - ) - file_path = str(Path(file_path) / file_name) - - start_time = time.time() - logger.info(f"[{tool_id}] Indexing started for doc: {file_name}") - PromptStudioHelper._publish_log( - {"tool_id": tool_id, "run_id": run_id, "doc_name": file_name}, - LogLevels.INFO, - LogLevels.RUN, - "Indexing started", - ) - - # Validate the status of adapter in profile manager - PromptStudioHelper.validate_adapter_status(default_profile) - # Need to check the user who created profile manager - # has access to adapters configured in profile manager - PromptStudioHelper.validate_profile_manager_owner_access(default_profile) - process_text = None - if text_processor: - process_text = text_processor.process - doc_id = PromptStudioHelper.dynamic_indexer( - profile_manager=default_profile, - tool_id=tool_id, - file_path=file_path, - org_id=org_id, - document_id=document_id, - is_summary=is_summary, - reindex=True, - run_id=run_id, - user_id=user_id, - process_text=process_text, - ) - - elapsed_time = time.time() - start_time - logger.info( - f"[{tool_id}] Indexing successful for doc: {file_name}," - f" took {elapsed_time:.3f}s" - ) - PromptStudioHelper._publish_log( - {"tool_id": tool_id, "run_id": run_id, "doc_name": file_name}, - LogLevels.INFO, - LogLevels.RUN, - f"Indexing successful, 
took {elapsed_time:.3f}s", - ) - - return doc_id.get("output") - - @staticmethod - def prompt_responder( - tool_id: str, - org_id: str, - user_id: str, - document_id: str, - id: Optional[str] = None, - run_id: str = None, - profile_manager_id: Optional[str] = None, - text_processor: Optional[type[Any]] = None, - ) -> Any: - """Execute chain/single run of the prompts. Makes a call to prompt - service and returns the dict of response. - - Args: - tool_id (str): ID of tool created in prompt studio - org_id (str): Organization ID - user_id (str): User's ID - document_id (str): UUID of the document uploaded - id (Optional[str]): ID of the prompt - profile_manager_id (Optional[str]): UUID of the profile manager - - Raises: - AnswerFetchError: Error from prompt-service - - Returns: - Any: Dictionary containing the response from prompt-service - """ - document: DocumentManager = DocumentManager.objects.get(pk=document_id) - doc_name: str = document.document_name - doc_path = PromptStudioHelper._get_document_path( - org_id, user_id, tool_id, doc_name - ) - - if id: - return PromptStudioHelper._execute_single_prompt( - id=id, - doc_path=doc_path, - doc_name=doc_name, - tool_id=tool_id, - org_id=org_id, - user_id=user_id, - document_id=document_id, - run_id=run_id, - profile_manager_id=profile_manager_id, - text_processor=text_processor, - ) - else: - return PromptStudioHelper._execute_prompts_in_single_pass( - doc_path=doc_path, - doc_name=doc_name, - tool_id=tool_id, - org_id=org_id, - user_id=user_id, - document_id=document_id, - run_id=run_id, - text_processor=text_processor, - ) - - @staticmethod - def _execute_single_prompt( - id, - doc_path, - doc_name, - tool_id, - org_id, - user_id, - document_id, - run_id, - profile_manager_id, - text_processor: Optional[type[Any]] = None, - ): - prompt_instance = PromptStudioHelper._fetch_prompt_from_id(id) - - if ( - prompt_instance.enforce_type == TSPKeys.TABLE - or prompt_instance.enforce_type == TSPKeys.RECORD - ) and not modifier_plugins: - raise OperationNotSupported() - - prompt_name = prompt_instance.prompt_key - PromptStudioHelper._publish_log( - { - "tool_id": tool_id, - "run_id": run_id, - "prompt_key": prompt_name, - "doc_name": doc_name, - }, - LogLevels.INFO, - LogLevels.RUN, - "Executing single prompt", - ) - prompts = [prompt_instance] - tool = prompt_instance.tool_id - - if tool.summarize_as_source: - directory, filename = os.path.split(doc_path) - doc_path = os.path.join( - directory, TSPKeys.SUMMARIZE, os.path.splitext(filename)[0] + ".txt" - ) - - PromptStudioHelper._publish_log( - { - "tool_id": tool_id, - "run_id": run_id, - "prompt_key": prompt_name, - "doc_name": doc_name, - }, - LogLevels.DEBUG, - LogLevels.RUN, - "Invoking prompt service", - ) - process_text = None - if text_processor: - process_text = text_processor.process - try: - response = PromptStudioHelper._fetch_response( - doc_path=doc_path, - doc_name=doc_name, - tool=tool, - prompt=prompt_instance, - org_id=org_id, - document_id=document_id, - run_id=run_id, - profile_manager_id=profile_manager_id, - user_id=user_id, - process_text=process_text, - ) - return PromptStudioHelper._handle_response( - response=response, - run_id=run_id, - prompts=prompts, - document_id=document_id, - is_single_pass=False, - profile_manager_id=profile_manager_id, - ) - except Exception as e: - logger.error( - f"[{tool.tool_id}] Error while fetching response for " - f"prompt {id} and doc {document_id}: {e}" - ) - msg = str(e) - PromptStudioHelper._publish_log( - { - "tool_id": tool_id, - 
"run_id": run_id, - "prompt_key": prompt_name, - "doc_name": doc_name, - }, - LogLevels.ERROR, - LogLevels.RUN, - msg, - ) - raise e - - @staticmethod - def _execute_prompts_in_single_pass( - doc_path, - doc_name, - tool_id, - org_id, - user_id, - document_id, - run_id, - text_processor: Optional[type[Any]] = None, - ): - prompts = PromptStudioHelper.fetch_prompt_from_tool(tool_id) - prompts = [ - prompt - for prompt in prompts - if prompt.prompt_type != TSPKeys.NOTES - and prompt.active - and prompt.enforce_type != TSPKeys.TABLE - and prompt.enforce_type != TSPKeys.RECORD - ] - if not prompts: - logger.error(f"[{tool_id or 'NA'}] No prompts found for id: {id}") - raise NoPromptsFound() - - PromptStudioHelper._publish_log( - {"tool_id": tool_id, "run_id": run_id, "prompt_id": str(id)}, - LogLevels.INFO, - LogLevels.RUN, - "Executing prompts in single pass", - ) - process_text = None - if text_processor: - process_text = text_processor.process - try: - tool = prompts[0].tool_id - response = PromptStudioHelper._fetch_single_pass_response( - file_path=doc_path, - doc_name=doc_name, - tool=tool, - prompts=prompts, - org_id=org_id, - document_id=document_id, - run_id=run_id, - user_id=user_id, - process_text=process_text, - ) - return PromptStudioHelper._handle_response( - response=response, - run_id=run_id, - prompts=prompts, - document_id=document_id, - is_single_pass=True, - ) - except Exception as e: - logger.error( - f"[{tool.tool_id}] Error while fetching single pass response: {e}" - ) - PromptStudioHelper._publish_log( - { - "tool_id": tool_id, - "run_id": run_id, - "prompt_id": str(id), - }, - LogLevels.ERROR, - LogLevels.RUN, - f"Failed to fetch single pass response. {e}", - ) - raise e - - @staticmethod - def _get_document_path(org_id, user_id, tool_id, doc_name): - doc_path = FileManagerHelper.handle_sub_directory_for_tenants( - org_id=org_id, - user_id=user_id, - tool_id=tool_id, - is_create=False, - ) - return str(Path(doc_path) / doc_name) - - @staticmethod - def _get_extract_or_summary_document_path( - org_id, user_id, tool_id, doc_name, doc_type - ) -> str: - doc_path = FileManagerHelper.handle_sub_directory_for_tenants( - org_id=org_id, - user_id=user_id, - tool_id=tool_id, - is_create=False, - ) - extracted_doc_name = Path(doc_name).stem + TSPKeys.TXT_EXTENTION - return str(Path(doc_path) / doc_type / extracted_doc_name) - - @staticmethod - def _handle_response( - response, - run_id, - prompts, - document_id, - is_single_pass, - profile_manager_id=None, - ): - if response.get("status") == IndexingStatus.PENDING_STATUS.value: - return { - "status": IndexingStatus.PENDING_STATUS.value, - "message": IndexingStatus.DOCUMENT_BEING_INDEXED.value, - } - - return OutputManagerHelper.handle_prompt_output_update( - run_id=run_id, - prompts=prompts, - outputs=response["output"], - document_id=document_id, - is_single_pass_extract=is_single_pass, - profile_manager_id=profile_manager_id, - metadata=response["metadata"], - ) - - @staticmethod - def _fetch_response( - tool: CustomTool, - doc_path: str, - doc_name: str, - prompt: ToolStudioPrompt, - org_id: str, - document_id: str, - run_id: str, - user_id: str, - profile_manager_id: Optional[str] = None, - process_text: Optional[Callable[[str], str]] = None, - ) -> Any: - """Utility function to invoke prompt service. Used internally. 
- - Args: - tool (CustomTool): CustomTool instance (prompt studio project) - doc_path (str): Path to the document - doc_name (str): Name of the document - prompt (ToolStudioPrompt): ToolStudioPrompt instance to fetch response - org_id (str): UUID of the organization - document_id (str): UUID of the document - profile_manager_id (Optional[str]): UUID of the profile manager - user_id (str): The ID of the user who uploaded the document - - - Raises: - DefaultProfileError: If no default profile is selected - AnswerFetchError: Due to failures in prompt service - - Returns: - Any: Output from LLM - """ - - # Fetch the ProfileManager instance using the profile_manager_id if provided - profile_manager = prompt.profile_manager - if profile_manager_id: - profile_manager = ProfileManagerHelper.get_profile_manager( - profile_manager_id=profile_manager_id - ) - - monitor_llm_instance: Optional[AdapterInstance] = tool.monitor_llm - monitor_llm: Optional[str] = None - challenge_llm_instance: Optional[AdapterInstance] = tool.challenge_llm - challenge_llm: Optional[str] = None - - if monitor_llm_instance: - monitor_llm = str(monitor_llm_instance.id) - else: - # Using default profile manager llm if monitor_llm is None - default_profile = ProfileManager.get_default_llm_profile(tool) - monitor_llm = str(default_profile.llm.id) - - # Using default profile manager llm if challenge_llm is None - if challenge_llm_instance: - challenge_llm = str(challenge_llm_instance.id) - else: - default_profile = ProfileManager.get_default_llm_profile(tool) - challenge_llm = str(default_profile.llm.id) - - # Need to check the user who created profile manager - PromptStudioHelper.validate_adapter_status(profile_manager) - # Need to check the user who created profile manager - # has access to adapters - PromptStudioHelper.validate_profile_manager_owner_access(profile_manager) - # Not checking reindex here as there might be - # change in Profile Manager - vector_db = str(profile_manager.vector_store.id) - embedding_model = str(profile_manager.embedding_model.id) - llm = str(profile_manager.llm.id) - x2text = str(profile_manager.x2text.id) - if not profile_manager: - raise DefaultProfileError() - index_result = PromptStudioHelper.dynamic_indexer( - profile_manager=profile_manager, - file_path=doc_path, - tool_id=str(tool.tool_id), - org_id=org_id, - document_id=document_id, - is_summary=tool.summarize_as_source, - run_id=run_id, - user_id=user_id, - process_text=process_text, - ) - if index_result.get("status") == IndexingStatus.PENDING_STATUS.value: - return { - "status": IndexingStatus.PENDING_STATUS.value, - "message": IndexingStatus.DOCUMENT_BEING_INDEXED.value, - } - tool_id = str(tool.tool_id) - output: dict[str, Any] = {} - outputs: list[dict[str, Any]] = [] - grammer_dict = {} - grammar_list = [] - # Adding validations - prompt_grammer = tool.prompt_grammer - if prompt_grammer: - for word, synonyms in prompt_grammer.items(): - synonyms = prompt_grammer[word] - grammer_dict[TSPKeys.WORD] = word - grammer_dict[TSPKeys.SYNONYMS] = synonyms - grammar_list.append(grammer_dict) - grammer_dict = {} - - output[TSPKeys.PROMPT] = prompt.prompt - output[TSPKeys.ACTIVE] = prompt.active - output[TSPKeys.CHUNK_SIZE] = profile_manager.chunk_size - output[TSPKeys.VECTOR_DB] = vector_db - output[TSPKeys.EMBEDDING] = embedding_model - output[TSPKeys.CHUNK_OVERLAP] = profile_manager.chunk_overlap - output[TSPKeys.LLM] = llm - output[TSPKeys.TYPE] = prompt.enforce_type - output[TSPKeys.NAME] = prompt.prompt_key - 
output[TSPKeys.RETRIEVAL_STRATEGY] = profile_manager.retrieval_strategy - output[TSPKeys.SIMILARITY_TOP_K] = profile_manager.similarity_top_k - output[TSPKeys.SECTION] = profile_manager.section - output[TSPKeys.X2TEXT_ADAPTER] = x2text - # Eval settings for the prompt - output[TSPKeys.EVAL_SETTINGS] = {} - output[TSPKeys.EVAL_SETTINGS][TSPKeys.EVAL_SETTINGS_EVALUATE] = prompt.evaluate - output[TSPKeys.EVAL_SETTINGS][TSPKeys.EVAL_SETTINGS_MONITOR_LLM] = [monitor_llm] - output[TSPKeys.EVAL_SETTINGS][ - TSPKeys.EVAL_SETTINGS_EXCLUDE_FAILED - ] = tool.exclude_failed - for attr in dir(prompt): - if attr.startswith(TSPKeys.EVAL_METRIC_PREFIX): - attr_val = getattr(prompt, attr) - output[TSPKeys.EVAL_SETTINGS][attr] = attr_val - - output = PromptStudioHelper.fetch_table_settings_if_enabled( - doc_name, prompt, org_id, user_id, tool_id, output - ) - variable_map = PromptStudioVariableService.frame_variable_replacement_map( - doc_id=document_id, prompt_object=prompt - ) - if variable_map: - output[TSPKeys.VARIABLE_MAP] = variable_map - outputs.append(output) - - tool_settings = {} - tool_settings[TSPKeys.ENABLE_CHALLENGE] = tool.enable_challenge - tool_settings[TSPKeys.CHALLENGE_LLM] = challenge_llm - tool_settings[TSPKeys.SINGLE_PASS_EXTRACTION_MODE] = ( - tool.single_pass_extraction_mode - ) - tool_settings[TSPKeys.SUMMARIZE_AS_SOURCE] = tool.summarize_as_source - tool_settings[TSPKeys.PREAMBLE] = tool.preamble - tool_settings[TSPKeys.POSTAMBLE] = tool.postamble - tool_settings[TSPKeys.GRAMMAR] = grammar_list - tool_settings[TSPKeys.ENABLE_HIGHLIGHT] = tool.enable_highlight - tool_settings[TSPKeys.PLATFORM_POSTAMBLE] = getattr( - settings, TSPKeys.PLATFORM_POSTAMBLE.upper(), "" - ) - - file_hash = ToolUtils.get_hash_from_file(file_path=doc_path) - - payload = { - TSPKeys.TOOL_SETTINGS: tool_settings, - TSPKeys.OUTPUTS: outputs, - TSPKeys.TOOL_ID: tool_id, - TSPKeys.RUN_ID: run_id, - TSPKeys.FILE_NAME: doc_name, - TSPKeys.FILE_HASH: file_hash, - Common.LOG_EVENTS_ID: StateStore.get(Common.LOG_EVENTS_ID), - } - - util = PromptIdeBaseTool(log_level=LogLevel.INFO, org_id=org_id) - - responder = PromptTool( - tool=util, - prompt_host=settings.PROMPT_HOST, - prompt_port=settings.PROMPT_PORT, - ) - include_metadata = {TSPKeys.INCLUDE_METADATA: True} - - answer = responder.answer_prompt(payload, include_metadata) - # TODO: Make use of dataclasses - if answer["status"] == "ERROR": - # TODO: Publish to FE logs from here - error_message = answer.get("error", "") - raise AnswerFetchError( - "Error while fetching response for " - f"'{prompt.prompt_key}' with '{doc_name}'. 
{error_message}" - ) - output_response = json.loads(answer["structure_output"]) - return output_response - - @staticmethod - def fetch_table_settings_if_enabled( - doc_name: str, - prompt: ToolStudioPrompt, - org_id: str, - user_id: str, - tool_id: str, - output: dict[str, Any], - ) -> dict[str, Any]: - - if ( - prompt.enforce_type == TSPKeys.TABLE - or prompt.enforce_type == TSPKeys.RECORD - ): - extract_doc_path: str = ( - PromptStudioHelper._get_extract_or_summary_document_path( - org_id, user_id, tool_id, doc_name, TSPKeys.EXTRACT - ) - ) - for modifier_plugin in modifier_plugins: - cls = modifier_plugin[ModifierConfig.METADATA][ - ModifierConfig.METADATA_SERVICE_CLASS - ] - output = cls.update( - output=output, - tool_id=tool_id, - prompt_id=str(prompt.prompt_id), - prompt=prompt.prompt, - input_file=extract_doc_path, - clean_pages=True, - ) - - return output - - @staticmethod - def dynamic_indexer( - profile_manager: ProfileManager, - tool_id: str, - file_path: str, - org_id: str, - document_id: str, - user_id: str, - is_summary: bool = False, - reindex: bool = False, - run_id: str = None, - process_text: Optional[Callable[[str], str]] = None, - ) -> Any: - """Used to index a file based on the passed arguments. - - This is useful when a file needs to be indexed dynamically as the - parameters meant for indexing changes. The file - - Args: - profile_manager (ProfileManager): Profile manager instance that hold - values such as chunk size, chunk overlap and adapter IDs - tool_id (str): UUID of the prompt studio tool - file_path (str): Path to the file that needs to be indexed - org_id (str): ID of the organization - is_summary (bool, optional): Flag to ensure if extracted contents - need to be persisted. Defaults to False. - user_id (str): The ID of the user who uploaded the document - - Returns: - str: Index key for the combination of arguments - """ - embedding_model = str(profile_manager.embedding_model.id) - vector_db = str(profile_manager.vector_store.id) - x2text_adapter = str(profile_manager.x2text.id) - extract_file_path: Optional[str] = None - - directory, filename = os.path.split(file_path) - if not is_summary: - extract_file_path = os.path.join( - directory, "extract", os.path.splitext(filename)[0] + ".txt" - ) - else: - profile_manager.chunk_size = 0 - - try: - - usage_kwargs = {"run_id": run_id} - # Orginal file name with which file got uploaded in prompt studio - usage_kwargs["file_name"] = filename - util = PromptIdeBaseTool(log_level=LogLevel.INFO, org_id=org_id) - tool_index = Index(tool=util) - doc_id_key = tool_index.generate_index_key( - vector_db=vector_db, - embedding=embedding_model, - x2text=x2text_adapter, - chunk_size=str(profile_manager.chunk_size), - chunk_overlap=str(profile_manager.chunk_overlap), - file_path=file_path, - file_hash=None, - ) - if not reindex: - indexed_doc_id = DocumentIndexingService.get_indexed_document_id( - org_id=org_id, user_id=user_id, doc_id_key=doc_id_key - ) - if indexed_doc_id: - return { - "status": IndexingStatus.COMPLETED_STATUS.value, - "output": indexed_doc_id, - } - # Polling if document is already being indexed - if DocumentIndexingService.is_document_indexing( - org_id=org_id, user_id=user_id, doc_id_key=doc_id_key - ): - return { - "status": IndexingStatus.PENDING_STATUS.value, - "output": IndexingStatus.DOCUMENT_BEING_INDEXED.value, - } - - # Set the document as being indexed - DocumentIndexingService.set_document_indexing( - org_id=org_id, user_id=user_id, doc_id_key=doc_id_key - ) - doc_id: str = tool_index.index( - 
tool_id=tool_id, - embedding_instance_id=embedding_model, - vector_db_instance_id=vector_db, - x2text_instance_id=x2text_adapter, - file_path=file_path, - chunk_size=profile_manager.chunk_size, - chunk_overlap=profile_manager.chunk_overlap, - reindex=reindex, - output_file_path=extract_file_path, - usage_kwargs=usage_kwargs.copy(), - process_text=process_text, - ) - - PromptStudioIndexHelper.handle_index_manager( - document_id=document_id, - is_summary=is_summary, - profile_manager=profile_manager, - doc_id=doc_id, - ) - DocumentIndexingService.mark_document_indexed( - org_id=org_id, user_id=user_id, doc_id_key=doc_id_key, doc_id=doc_id - ) - return {"status": IndexingStatus.COMPLETED_STATUS.value, "output": doc_id} - except (IndexingError, IndexingAPIError, SdkError) as e: - doc_name = os.path.split(file_path)[1] - PromptStudioHelper._publish_log( - {"tool_id": tool_id, "run_id": run_id, "doc_name": doc_name}, - LogLevels.ERROR, - LogLevels.RUN, - f"Indexing failed : {e}", - ) - raise IndexingAPIError( - f"Error while indexing '{doc_name}'. {str(e)}" - ) from e - - @staticmethod - def _fetch_single_pass_response( - tool: CustomTool, - file_path: str, - doc_name: str, - prompts: list[ToolStudioPrompt], - org_id: str, - user_id: str, - document_id: str, - run_id: str = None, - process_text: Optional[Callable[[str], str]] = None, - ) -> Any: - tool_id: str = str(tool.tool_id) - outputs: list[dict[str, Any]] = [] - grammar: list[dict[str, Any]] = [] - prompt_grammar = tool.prompt_grammer - default_profile = ProfileManager.get_default_llm_profile(tool) - challenge_llm_instance: Optional[AdapterInstance] = tool.challenge_llm - challenge_llm: Optional[str] = None - # Using default profile manager llm if challenge_llm is None - if challenge_llm_instance: - challenge_llm = str(challenge_llm_instance.id) - else: - challenge_llm = str(default_profile.llm.id) - # Need to check the user who created profile manager - PromptStudioHelper.validate_adapter_status(default_profile) - # has access to adapters configured in profile manager - PromptStudioHelper.validate_profile_manager_owner_access(default_profile) - default_profile.chunk_size = 0 # To retrive full context - - if prompt_grammar: - for word, synonyms in prompt_grammar.items(): - grammar.append({TSPKeys.WORD: word, TSPKeys.SYNONYMS: synonyms}) - - if not default_profile: - raise DefaultProfileError() - - index_result = PromptStudioHelper.dynamic_indexer( - profile_manager=default_profile, - file_path=file_path, - tool_id=tool_id, - org_id=org_id, - is_summary=tool.summarize_as_source, - document_id=document_id, - run_id=run_id, - user_id=user_id, - process_text=process_text, - ) - if index_result.get("status") == IndexingStatus.PENDING_STATUS.value: - return { - "status": IndexingStatus.PENDING_STATUS.value, - "message": IndexingStatus.DOCUMENT_BEING_INDEXED.value, - } - - vector_db = str(default_profile.vector_store.id) - embedding_model = str(default_profile.embedding_model.id) - llm = str(default_profile.llm.id) - x2text = str(default_profile.x2text.id) - tool_settings = {} - tool_settings[TSPKeys.PREAMBLE] = tool.preamble - tool_settings[TSPKeys.POSTAMBLE] = tool.postamble - tool_settings[TSPKeys.GRAMMAR] = grammar - tool_settings[TSPKeys.LLM] = llm - tool_settings[TSPKeys.X2TEXT_ADAPTER] = x2text - tool_settings[TSPKeys.VECTOR_DB] = vector_db - tool_settings[TSPKeys.EMBEDDING] = embedding_model - tool_settings[TSPKeys.CHUNK_SIZE] = default_profile.chunk_size - tool_settings[TSPKeys.CHUNK_OVERLAP] = default_profile.chunk_overlap - 
tool_settings[TSPKeys.ENABLE_CHALLENGE] = tool.enable_challenge - tool_settings[TSPKeys.ENABLE_HIGHLIGHT] = tool.enable_highlight - tool_settings[TSPKeys.CHALLENGE_LLM] = challenge_llm - - for prompt in prompts: - if not prompt.prompt: - raise EmptyPromptError() - output: dict[str, Any] = {} - output[TSPKeys.PROMPT] = prompt.prompt - output[TSPKeys.ACTIVE] = prompt.active - output[TSPKeys.TYPE] = prompt.enforce_type - output[TSPKeys.NAME] = prompt.prompt_key - outputs.append(output) - - if tool.summarize_as_source: - path = Path(file_path) - file_path = str(path.parent / TSPKeys.SUMMARIZE / (path.stem + ".txt")) - file_hash = ToolUtils.get_hash_from_file(file_path=file_path) - - payload = { - TSPKeys.TOOL_SETTINGS: tool_settings, - TSPKeys.OUTPUTS: outputs, - TSPKeys.TOOL_ID: tool_id, - TSPKeys.RUN_ID: run_id, - TSPKeys.FILE_HASH: file_hash, - TSPKeys.FILE_NAME: doc_name, - Common.LOG_EVENTS_ID: StateStore.get(Common.LOG_EVENTS_ID), - } - - util = PromptIdeBaseTool(log_level=LogLevel.INFO, org_id=org_id) - - responder = PromptTool( - tool=util, - prompt_host=settings.PROMPT_HOST, - prompt_port=settings.PROMPT_PORT, - ) - include_metadata = {TSPKeys.INCLUDE_METADATA: True} - - answer = responder.single_pass_extraction(payload, include_metadata) - # TODO: Make use of dataclasses - if answer["status"] == "ERROR": - error_message = answer.get("error", None) - raise AnswerFetchError( - f"Error while fetching response for prompt(s). {error_message}" - ) - output_response = json.loads(answer["structure_output"]) - return output_response - - @staticmethod - def get_tool_from_tool_id(tool_id: str) -> Optional[CustomTool]: - try: - tool: CustomTool = CustomTool.objects.get(tool_id=tool_id) - return tool - except CustomTool.DoesNotExist: - return None diff --git a/backend/prompt_studio/prompt_studio_core/prompt_variable_service.py b/backend/prompt_studio/prompt_studio_core/prompt_variable_service.py deleted file mode 100644 index d098c01fb..000000000 --- a/backend/prompt_studio/prompt_studio_core/prompt_variable_service.py +++ /dev/null @@ -1,93 +0,0 @@ -import re -from enum import Enum -from typing import Any - -from prompt_studio.prompt_studio.models import ToolStudioPrompt -from prompt_studio.prompt_studio_core.exceptions import PromptNotRun -from prompt_studio.prompt_studio_output_manager.models import PromptStudioOutputManager - - -class VariableType(str, Enum): - STATIC = "STATIC" - DYNAMIC = "DYNAMIC" - - -class VariableConstants: - - VARIABLE_REGEX = "{{(.+?)}}" - DYNAMIC_VARIABLE_DATA_REGEX = r"\[(.*?)\]" - DYNAMIC_VARIABLE_URL_REGEX = r"(?i)\b((?:https?://|www\d{0,3}[.]|[a-z0-9.\-]+[.][a-z]{2,4}/)(?:[^\s()<>]+|\(([^\s()<>]+|(\([^\s()<>]+\)))*\))+(?:\(([^\s()<>]+|(\([^\s()<>]+\)))*\)|[^\s`!()\[\]{};:'\".,<>?«»“”‘’]))" # noqa: E501 - - -class PromptStudioVariableService: - - @staticmethod - def fetch_variable_outputs(variable: str, doc_id: str, tool_id: str) -> Any: - variable_prompt: ToolStudioPrompt = ToolStudioPrompt.objects.get( - prompt_key=variable, tool_id=tool_id - ) - try: - output = PromptStudioOutputManager.objects.get( - prompt_id=variable_prompt.prompt_id, - document_manager=doc_id, - tool_id=variable_prompt.tool_id, - profile_manager=variable_prompt.profile_manager, - is_single_pass_extract=False, - ) - except PromptStudioOutputManager.DoesNotExist: - raise PromptNotRun( - f"The prompt : {variable} must be executed before " - "it can be used as a variable in another prompt. " - "Please execute the prompt first and try again." 
- ) - return output.output - - @staticmethod - def identify_variable_type(variable: str) -> VariableType: - variable_type: VariableType - pattern = re.compile(VariableConstants.DYNAMIC_VARIABLE_URL_REGEX) - if re.findall(pattern, variable): - variable_type = VariableType.DYNAMIC - else: - variable_type = VariableType.STATIC - return variable_type - - @staticmethod - def extract_variables_from_prompt(prompt: str) -> list[str]: - variable: list[str] = [] - variable = re.findall(VariableConstants.VARIABLE_REGEX, prompt) - return variable - - @staticmethod - def frame_variable_replacement_map( - doc_id: str, prompt_object: ToolStudioPrompt - ) -> dict[str, Any]: - variable_output_map: dict[str, Any] = {} - prompt = prompt_object.prompt - variables = PromptStudioVariableService.extract_variables_from_prompt( - prompt=prompt - ) - for variable in variables: - variable_type: VariableType = ( - PromptStudioVariableService.identify_variable_type(variable=variable) - ) - if variable_type == VariableType.STATIC: - variable_output_map[variable] = ( - PromptStudioVariableService.fetch_variable_outputs( - variable=variable, - doc_id=doc_id, - tool_id=prompt_object.tool_id.tool_id, - ) - ) - if variable_type == VariableType.DYNAMIC: - variable = re.findall( - VariableConstants.DYNAMIC_VARIABLE_DATA_REGEX, variable - )[0] - variable_output_map[variable] = ( - PromptStudioVariableService.fetch_variable_outputs( - variable=variable, - doc_id=doc_id, - tool_id=prompt_object.tool_id.tool_id, - ) - ) - return variable_output_map diff --git a/backend/prompt_studio/prompt_studio_core/serializers.py b/backend/prompt_studio/prompt_studio_core/serializers.py deleted file mode 100644 index 595a45f2a..000000000 --- a/backend/prompt_studio/prompt_studio_core/serializers.py +++ /dev/null @@ -1,116 +0,0 @@ -import logging -from typing import Any - -from account.models import User -from account.serializer import UserSerializer -from django.core.exceptions import ObjectDoesNotExist -from file_management.constants import FileInformationKey -from prompt_studio.prompt_profile_manager.models import ProfileManager -from prompt_studio.prompt_studio.models import ToolStudioPrompt -from prompt_studio.prompt_studio.serializers import ToolStudioPromptSerializer -from prompt_studio.prompt_studio_core.constants import ToolStudioKeys as TSKeys -from prompt_studio.prompt_studio_core.exceptions import DefaultProfileError -from rest_framework import serializers -from utils.FileValidator import FileValidator - -from backend.serializers import AuditSerializer - -from .models import CustomTool - -logger = logging.getLogger(__name__) - - -class CustomToolSerializer(AuditSerializer): - shared_users = serializers.PrimaryKeyRelatedField( - queryset=User.objects.all(), required=False, allow_null=True, many=True - ) - - class Meta: - model = CustomTool - fields = "__all__" - - def to_representation(self, instance): # type: ignore - data = super().to_representation(instance) - try: - profile_manager = ProfileManager.objects.get( - prompt_studio_tool=instance, is_summarize_llm=True - ) - data[TSKeys.SUMMARIZE_LLM_PROFILE] = profile_manager.profile_id - except ObjectDoesNotExist: - logger.info( - "Summarize LLM profile doesnt exist for prompt tool %s", - str(instance.tool_id), - ) - try: - profile_manager = ProfileManager.get_default_llm_profile(instance) - data[TSKeys.DEFAULT_PROFILE] = profile_manager.profile_id - except DefaultProfileError: - logger.info( - "Default LLM profile doesnt exist for prompt tool %s", - str(instance.tool_id), - ) - try: 
- prompt_instance: ToolStudioPrompt = ToolStudioPrompt.objects.filter( - tool_id=data.get(TSKeys.TOOL_ID) - ).order_by("sequence_number") - data[TSKeys.PROMPTS] = [] - output: list[Any] = [] - # Appending prompt instances of the tool for FE Processing - if prompt_instance.count() != 0: - for prompt in prompt_instance: - prompt_serializer = ToolStudioPromptSerializer(prompt) - output.append(prompt_serializer.data) - data[TSKeys.PROMPTS] = output - except Exception as e: - logger.error(f"Error occured while appending prompts {e}") - return data - - data["created_by_email"] = instance.created_by.email - - return data - - -class PromptStudioIndexSerializer(serializers.Serializer): - document_id = serializers.CharField() - - -class PromptStudioResponseSerializer(serializers.Serializer): - file_name = serializers.CharField() - tool_id = serializers.CharField() - id = serializers.CharField() - - -class SharedUserListSerializer(serializers.ModelSerializer): - """Used for listing users of Custom tool.""" - - created_by = UserSerializer() - shared_users = UserSerializer(many=True) - - class Meta: - model = CustomTool - fields = ( - "tool_id", - "tool_name", - "created_by", - "shared_users", - ) - - -class FileInfoIdeSerializer(serializers.Serializer): - document_id = serializers.CharField() - view_type = serializers.CharField(required=False) - - -class FileUploadIdeSerializer(serializers.Serializer): - file = serializers.ListField( - child=serializers.FileField(), - required=True, - validators=[ - FileValidator( - allowed_extensions=FileInformationKey.FILE_UPLOAD_ALLOWED_EXT, - allowed_mimetypes=FileInformationKey.FILE_UPLOAD_ALLOWED_MIME, - min_size=0, - max_size=FileInformationKey.FILE_UPLOAD_MAX_SIZE, - ) - ], - ) diff --git a/backend/prompt_studio/prompt_studio_core/static/select_choices.json b/backend/prompt_studio/prompt_studio_core/static/select_choices.json deleted file mode 100644 index f9e002f7d..000000000 --- a/backend/prompt_studio/prompt_studio_core/static/select_choices.json +++ /dev/null @@ -1,35 +0,0 @@ -{ - "combined_output": - { - "JSON":"JSON", - "YAML":"YAML" - }, - "choose_llm":{ - "AZURE":"Azure OpenAI" - }, - "output_type":{ - "string":"Text", - "number":"number", - "email":"email", - "date":"date", - "boolean":"boolean", - "json":"json", - "table":"table", - "record":"record" - }, - "output_processing":{ - "DEFAULT":"Default" - }, - "embedding":{ - "azure_openai_embedding":"azure_openai_embedding", - "openai_embedding":"openai_embedding" - }, - "retrieval_strategy":{ - "simple":"simple", - "subquestion":"subquestion" - }, - "vector_store":{ - "Postgres pg_vector":"Postgres pg_vector", - "qdrant":"qdrant" - } -} diff --git a/backend/prompt_studio/prompt_studio_core/urls.py b/backend/prompt_studio/prompt_studio_core/urls.py deleted file mode 100644 index 3a32f63c4..000000000 --- a/backend/prompt_studio/prompt_studio_core/urls.py +++ /dev/null @@ -1,114 +0,0 @@ -from django.urls import path -from rest_framework.urlpatterns import format_suffix_patterns - -from .views import PromptStudioCoreView - -prompt_studio_list = PromptStudioCoreView.as_view({"get": "list", "post": "create"}) -prompt_studio_detail = PromptStudioCoreView.as_view( - { - "get": "retrieve", - "put": "update", - "patch": "partial_update", - "delete": "destroy", - } -) -prompt_studio_choices = PromptStudioCoreView.as_view({"get": "get_select_choices"}) -prompt_studio_profiles = PromptStudioCoreView.as_view( - {"get": "list_profiles", "patch": "make_profile_default"} -) - -prompt_studio_prompts = 
PromptStudioCoreView.as_view({"post": "create_prompt"}) - -prompt_studio_profilemanager = PromptStudioCoreView.as_view( - {"post": "create_profile_manager"} -) - -prompt_studio_prompt_index = PromptStudioCoreView.as_view({"post": "index_document"}) -prompt_studio_prompt_response = PromptStudioCoreView.as_view({"post": "fetch_response"}) -prompt_studio_adapter_choices = PromptStudioCoreView.as_view( - {"get": "get_adapter_choices"} -) -prompt_studio_single_pass_extraction = PromptStudioCoreView.as_view( - {"post": "single_pass_extraction"} -) -prompt_studio_users = PromptStudioCoreView.as_view({"get": "list_of_shared_users"}) - - -prompt_studio_file = PromptStudioCoreView.as_view( - { - "post": "upload_for_ide", - "get": "fetch_contents_ide", - "delete": "delete_for_ide", - } -) - -prompt_studio_export = PromptStudioCoreView.as_view( - {"post": "export_tool", "get": "export_tool_info"} -) - - -urlpatterns = format_suffix_patterns( - [ - path("prompt-studio/", prompt_studio_list, name="prompt-studio-list"), - path( - "prompt-studio//", - prompt_studio_detail, - name="tool-studio-detail", - ), - path( - "prompt-studio/select_choices/", - prompt_studio_choices, - name="prompt-studio-choices", - ), - path( - "prompt-studio/prompt-studio-profile//", - prompt_studio_profiles, - name="prompt-studio-profiles", - ), - path( - "prompt-studio/prompt-studio-prompt//", - prompt_studio_prompts, - name="prompt-studio-prompts", - ), - path( - "prompt-studio/profilemanager/", - prompt_studio_profilemanager, - name="prompt-studio-profilemanager", - ), - path( - "prompt-studio/index-document/", - prompt_studio_prompt_index, - name="prompt-studio-prompt-index", - ), - path( - "prompt-studio/fetch_response/", - prompt_studio_prompt_response, - name="prompt-studio-prompt-response", - ), - path( - "prompt-studio/adapter-choices/", - prompt_studio_adapter_choices, - name="prompt-studio-adapter-choices", - ), - path( - "prompt-studio/single-pass-extraction/", - prompt_studio_single_pass_extraction, - name="prompt-studio-single-pass-extraction", - ), - path( - "prompt-studio/users/", - prompt_studio_users, - name="prompt-studio-users", - ), - path( - "prompt-studio/file/", - prompt_studio_file, - name="prompt_studio_file", - ), - path( - "prompt-studio/export/", - prompt_studio_export, - name="prompt_studio_export", - ), - ] -) diff --git a/backend/prompt_studio/prompt_studio_core/views.py b/backend/prompt_studio/prompt_studio_core/views.py deleted file mode 100644 index afcd92b16..000000000 --- a/backend/prompt_studio/prompt_studio_core/views.py +++ /dev/null @@ -1,556 +0,0 @@ -import logging -import uuid -from typing import Any, Optional - -from account.custom_exceptions import DuplicateData -from django.db import IntegrityError -from django.db.models import QuerySet -from django.http import HttpRequest -from file_management.exceptions import FileNotFound -from file_management.file_management_helper import FileManagerHelper -from permissions.permission import IsOwner, IsOwnerOrSharedUser -from prompt_studio.processor_loader import get_plugin_class_by_name, load_plugins -from prompt_studio.prompt_profile_manager.constants import ( - ProfileManagerErrors, - ProfileManagerKeys, -) -from prompt_studio.prompt_profile_manager.models import ProfileManager -from prompt_studio.prompt_profile_manager.serializers import ProfileManagerSerializer -from prompt_studio.prompt_studio.constants import ToolStudioPromptErrors -from prompt_studio.prompt_studio.serializers import ToolStudioPromptSerializer -from 
prompt_studio.prompt_studio_core.constants import ( - FileViewTypes, - ToolStudioErrors, - ToolStudioKeys, - ToolStudioPromptKeys, -) -from prompt_studio.prompt_studio_core.document_indexing_service import ( - DocumentIndexingService, -) -from prompt_studio.prompt_studio_core.exceptions import ( - IndexingAPIError, - MaxProfilesReachedError, - ToolDeleteError, -) -from prompt_studio.prompt_studio_core.prompt_studio_helper import PromptStudioHelper -from prompt_studio.prompt_studio_document_manager.models import DocumentManager -from prompt_studio.prompt_studio_document_manager.prompt_studio_document_helper import ( # noqa: E501 - PromptStudioDocumentHelper, -) -from prompt_studio.prompt_studio_index_manager.models import IndexManager -from prompt_studio.prompt_studio_registry.prompt_studio_registry_helper import ( - PromptStudioRegistryHelper, -) -from prompt_studio.prompt_studio_registry.serializers import ( - ExportToolRequestSerializer, - PromptStudioRegistryInfoSerializer, -) -from rest_framework import status, viewsets -from rest_framework.decorators import action -from rest_framework.request import Request -from rest_framework.response import Response -from rest_framework.versioning import URLPathVersioning -from tool_instance.models import ToolInstance -from unstract.sdk.utils.common_utils import CommonUtils -from utils.user_session import UserSessionUtils - -from unstract.connectors.filesystems.local_storage.local_storage import LocalStorageFS - -from .models import CustomTool -from .serializers import ( - CustomToolSerializer, - FileInfoIdeSerializer, - FileUploadIdeSerializer, - PromptStudioIndexSerializer, - SharedUserListSerializer, -) - -logger = logging.getLogger(__name__) - - -class PromptStudioCoreView(viewsets.ModelViewSet): - """Viewset to handle all Custom tool related operations.""" - - versioning_class = URLPathVersioning - - serializer_class = CustomToolSerializer - - processor_plugins = load_plugins() - - def get_permissions(self) -> list[Any]: - if self.action == "destroy": - return [IsOwner()] - - return [IsOwnerOrSharedUser()] - - def get_queryset(self) -> Optional[QuerySet]: - return CustomTool.objects.for_user(self.request.user) - - def create(self, request: HttpRequest) -> Response: - serializer = self.get_serializer(data=request.data) - serializer.is_valid(raise_exception=True) - try: - self.perform_create(serializer) - except IntegrityError: - raise DuplicateData( - f"{ToolStudioErrors.TOOL_NAME_EXISTS}, \ - {ToolStudioErrors.DUPLICATE_API}" - ) - PromptStudioHelper.create_default_profile_manager( - request.user, serializer.data["tool_id"] - ) - return Response(serializer.data, status=status.HTTP_201_CREATED) - - def perform_destroy(self, instance: CustomTool) -> None: - organization_id = UserSessionUtils.get_organization_id(self.request) - instance.delete(organization_id) - - def destroy( - self, request: Request, *args: tuple[Any], **kwargs: dict[str, Any] - ) -> Response: - instance: CustomTool = self.get_object() - # Checks if tool is exported - if hasattr(instance, "prompt_studio_registry"): - exported_tool_instances_in_use = ToolInstance.objects.filter( - tool_id__exact=instance.prompt_studio_registry.pk - ) - dependent_wfs = set() - for tool_instance in exported_tool_instances_in_use: - dependent_wfs.add(tool_instance.workflow_id) - if len(dependent_wfs) > 0: - logger.info( - f"Cannot destroy custom tool {instance.tool_id}," - f" depended by workflows {dependent_wfs}" - ) - raise ToolDeleteError( - "Failed to delete tool, its used in other workflows. 
" - "Delete its usages first" - ) - return super().destroy(request, *args, **kwargs) - - def partial_update( - self, request: Request, *args: tuple[Any], **kwargs: dict[str, Any] - ) -> Response: - summarize_llm_profile_id = request.data.get( - ToolStudioKeys.SUMMARIZE_LLM_PROFILE, None - ) - if summarize_llm_profile_id: - prompt_tool = self.get_object() - - ProfileManager.objects.filter(prompt_studio_tool=prompt_tool).update( - is_summarize_llm=False - ) - profile_manager = ProfileManager.objects.get(pk=summarize_llm_profile_id) - profile_manager.is_summarize_llm = True - profile_manager.save() - - return super().partial_update(request, *args, **kwargs) - - @action(detail=True, methods=["get"]) - def get_select_choices(self, request: HttpRequest) -> Response: - """Method to return all static dropdown field values. - - The field values are retrieved from `./static/select_choices.json`. - - Returns: - Response: Reponse of dropdown dict - """ - try: - select_choices: dict[str, Any] = PromptStudioHelper.get_select_fields() - return Response(select_choices, status=status.HTTP_200_OK) - except Exception as e: - logger.error(f"Error occured while fetching select fields {e}") - return Response(select_choices, status=status.HTTP_204_NO_CONTENT) - - @action(detail=True, methods=["get"]) - def list_profiles(self, request: HttpRequest, pk: Any = None) -> Response: - prompt_tool = ( - self.get_object() - ) # Assuming you have a get_object method in your viewset - - profile_manager_instances = ProfileManager.objects.filter( - prompt_studio_tool=prompt_tool - ) - - serialized_instances = ProfileManagerSerializer( - profile_manager_instances, many=True - ).data - - return Response(serialized_instances) - - @action(detail=True, methods=["patch"]) - def make_profile_default(self, request: HttpRequest, pk: Any = None) -> Response: - prompt_tool = ( - self.get_object() - ) # Assuming you have a get_object method in your viewset - - ProfileManager.objects.filter(prompt_studio_tool=prompt_tool).update( - is_default=False - ) - - profile_manager = ProfileManager.objects.get(pk=request.data["default_profile"]) - profile_manager.is_default = True - profile_manager.save() - - return Response( - status=status.HTTP_200_OK, - data={"default_profile": profile_manager.profile_id}, - ) - - @action(detail=True, methods=["post"]) - def index_document(self, request: HttpRequest, pk: Any = None) -> Response: - """API Entry point method to index input file. 
- - Args: - request (HttpRequest) - - Raises: - IndexingError - ValidationError - - Returns: - Response - """ - tool = self.get_object() - serializer = PromptStudioIndexSerializer(data=request.data) - serializer.is_valid(raise_exception=True) - document_id: str = serializer.validated_data.get( - ToolStudioPromptKeys.DOCUMENT_ID - ) - document: DocumentManager = DocumentManager.objects.get(pk=document_id) - file_name: str = document.document_name - text_processor = get_plugin_class_by_name( - name="text_processor", - plugins=self.processor_plugins, - ) - # Generate a run_id - run_id = CommonUtils.generate_uuid() - unique_id = PromptStudioHelper.index_document( - tool_id=str(tool.tool_id), - file_name=file_name, - org_id=UserSessionUtils.get_organization_id(request), - user_id=tool.created_by.user_id, - document_id=document_id, - run_id=run_id, - text_processor=text_processor, - ) - - usage_kwargs: dict[Any, Any] = dict() - usage_kwargs[ToolStudioPromptKeys.RUN_ID] = run_id - cls = get_plugin_class_by_name( - name="summarizer", - plugins=self.processor_plugins, - ) - if cls: - cls.process( - tool_id=str(tool.tool_id), - file_name=file_name, - org_id=UserSessionUtils.get_organization_id(request), - user_id=tool.created_by.user_id, - document_id=document_id, - usage_kwargs=usage_kwargs.copy(), - ) - - if unique_id: - return Response( - {"message": "Document indexed successfully."}, - status=status.HTTP_200_OK, - ) - else: - logger.error("Error occured while indexing. Unique ID is not valid.") - raise IndexingAPIError() - - @action(detail=True, methods=["post"]) - def fetch_response(self, request: HttpRequest, pk: Any = None) -> Response: - """API Entry point method to fetch response to prompt. - - Args: - request (HttpRequest): _description_ - - Raises: - FilenameMissingError: _description_ - - Returns: - Response - """ - custom_tool = self.get_object() - tool_id: str = str(custom_tool.tool_id) - document_id: str = request.data.get(ToolStudioPromptKeys.DOCUMENT_ID) - id: str = request.data.get(ToolStudioPromptKeys.ID) - run_id: str = request.data.get(ToolStudioPromptKeys.RUN_ID) - profile_manager: str = request.data.get(ToolStudioPromptKeys.PROFILE_MANAGER_ID) - if not run_id: - # Generate a run_id - run_id = CommonUtils.generate_uuid() - text_processor = get_plugin_class_by_name( - name="text_processor", - plugins=self.processor_plugins, - ) - response: dict[str, Any] = PromptStudioHelper.prompt_responder( - id=id, - tool_id=tool_id, - org_id=UserSessionUtils.get_organization_id(request), - user_id=custom_tool.created_by.user_id, - document_id=document_id, - run_id=run_id, - profile_manager_id=profile_manager, - text_processor=text_processor, - ) - return Response(response, status=status.HTTP_200_OK) - - @action(detail=True, methods=["post"]) - def single_pass_extraction(self, request: HttpRequest, pk: uuid) -> Response: - """API Entry point method to fetch response to prompt. 
- - Args: - request (HttpRequest): _description_ - pk (Any): Primary key of the CustomTool - - Returns: - Response - """ - # TODO: Handle fetch_response and single_pass_ - # extraction using common function - custom_tool = self.get_object() - tool_id: str = str(custom_tool.tool_id) - document_id: str = request.data.get(ToolStudioPromptKeys.DOCUMENT_ID) - run_id: str = request.data.get(ToolStudioPromptKeys.RUN_ID) - if not run_id: - # Generate a run_id - run_id = CommonUtils.generate_uuid() - text_processor = get_plugin_class_by_name( - name="text_processor", - plugins=self.processor_plugins, - ) - response: dict[str, Any] = PromptStudioHelper.prompt_responder( - tool_id=tool_id, - org_id=UserSessionUtils.get_organization_id(request), - user_id=custom_tool.created_by.user_id, - document_id=document_id, - run_id=run_id, - text_processor=text_processor, - ) - return Response(response, status=status.HTTP_200_OK) - - @action(detail=True, methods=["get"]) - def list_of_shared_users(self, request: HttpRequest, pk: Any = None) -> Response: - - custom_tool = ( - self.get_object() - ) # Assuming you have a get_object method in your viewset - - serialized_instances = SharedUserListSerializer(custom_tool).data - - return Response(serialized_instances) - - @action(detail=True, methods=["post"]) - def create_prompt(self, request: HttpRequest, pk: Any = None) -> Response: - context = super().get_serializer_context() - serializer = ToolStudioPromptSerializer(data=request.data, context=context) - serializer.is_valid(raise_exception=True) - try: - # serializer.save() - self.perform_create(serializer) - except IntegrityError: - raise DuplicateData( - f"{ToolStudioPromptErrors.PROMPT_NAME_EXISTS}, \ - {ToolStudioPromptErrors.DUPLICATE_API}" - ) - return Response(serializer.data, status=status.HTTP_201_CREATED) - - @action(detail=True, methods=["post"]) - def create_profile_manager(self, request: HttpRequest, pk: Any = None) -> Response: - context = super().get_serializer_context() - serializer = ProfileManagerSerializer(data=request.data, context=context) - - serializer.is_valid(raise_exception=True) - # Check for the maximum number of profiles constraint - prompt_studio_tool = serializer.validated_data[ - ProfileManagerKeys.PROMPT_STUDIO_TOOL - ] - profile_count = ProfileManager.objects.filter( - prompt_studio_tool=prompt_studio_tool - ).count() - - if profile_count >= ProfileManagerKeys.MAX_PROFILE_COUNT: - raise MaxProfilesReachedError() - try: - self.perform_create(serializer) - except IntegrityError: - raise DuplicateData( - f"{ProfileManagerErrors.PROFILE_NAME_EXISTS}, \ - {ProfileManagerErrors.DUPLICATE_API}" - ) - return Response(serializer.data, status=status.HTTP_201_CREATED) - - @action(detail=True, methods=["get"]) - def fetch_contents_ide(self, request: HttpRequest, pk: Any = None) -> Response: - custom_tool = self.get_object() - serializer = FileInfoIdeSerializer(data=request.GET) - serializer.is_valid(raise_exception=True) - document_id: str = serializer.validated_data.get("document_id") - document: DocumentManager = DocumentManager.objects.get(pk=document_id) - file_name: str = document.document_name - view_type: str = serializer.validated_data.get("view_type") - - filename_without_extension = file_name.rsplit(".", 1)[0] - if view_type == FileViewTypes.EXTRACT: - file_name = ( - f"{FileViewTypes.EXTRACT.lower()}/" f"{filename_without_extension}.txt" - ) - if view_type == FileViewTypes.SUMMARIZE: - file_name = ( - f"{FileViewTypes.SUMMARIZE.lower()}/" - f"{filename_without_extension}.txt" - ) - 
- file_path = file_path = FileManagerHelper.handle_sub_directory_for_tenants( - UserSessionUtils.get_organization_id(request), - is_create=True, - user_id=custom_tool.created_by.user_id, - tool_id=str(custom_tool.tool_id), - ) - file_system = LocalStorageFS(settings={"path": file_path}) - if not file_path.endswith("/"): - file_path += "/" - file_path += file_name - # Temporary Hack for frictionless onboarding as the user id will be empty - try: - contents = FileManagerHelper.fetch_file_contents(file_system, file_path) - except FileNotFound: - file_path = file_path = FileManagerHelper.handle_sub_directory_for_tenants( - UserSessionUtils.get_organization_id(request), - is_create=True, - user_id="", - tool_id=str(custom_tool.tool_id), - ) - if not file_path.endswith("/"): - file_path += "/" - file_path += file_name - contents = FileManagerHelper.fetch_file_contents(file_system, file_path) - - return Response({"data": contents}, status=status.HTTP_200_OK) - - @action(detail=True, methods=["post"]) - def upload_for_ide(self, request: HttpRequest, pk: Any = None) -> Response: - custom_tool = self.get_object() - serializer = FileUploadIdeSerializer(data=request.data) - serializer.is_valid(raise_exception=True) - uploaded_files: Any = serializer.validated_data.get("file") - - file_path = FileManagerHelper.handle_sub_directory_for_tenants( - UserSessionUtils.get_organization_id(request), - is_create=True, - user_id=custom_tool.created_by.user_id, - tool_id=str(custom_tool.tool_id), - ) - file_system = LocalStorageFS(settings={"path": file_path}) - - documents = [] - for uploaded_file in uploaded_files: - file_name = uploaded_file.name - - # Create a record in the db for the file - document = PromptStudioDocumentHelper.create( - tool_id=str(custom_tool.tool_id), document_name=file_name - ) - # Create a dictionary to store document data - doc = { - "document_id": document.document_id, - "document_name": document.document_name, - "tool": document.tool.tool_id, - } - # Store file - logger.info( - f"Uploading file: {file_name}" if file_name else "Uploading file" - ) - FileManagerHelper.upload_file( - file_system, - file_path, - uploaded_file, - file_name, - ) - documents.append(doc) - return Response({"data": documents}) - - @action(detail=True, methods=["delete"]) - def delete_for_ide(self, request: HttpRequest, pk: uuid) -> Response: - custom_tool = self.get_object() - serializer = FileInfoIdeSerializer(data=request.data) - serializer.is_valid(raise_exception=True) - document_id: str = serializer.validated_data.get( - ToolStudioPromptKeys.DOCUMENT_ID - ) - org_id = UserSessionUtils.get_organization_id(request) - user_id = custom_tool.created_by.user_id - document: DocumentManager = DocumentManager.objects.get(pk=document_id) - file_name: str = document.document_name - file_path = FileManagerHelper.handle_sub_directory_for_tenants( - org_id=org_id, - is_create=False, - user_id=user_id, - tool_id=str(custom_tool.tool_id), - ) - path = file_path - file_system = LocalStorageFS(settings={"path": path}) - try: - # Delete indexed flags in redis - index_managers = IndexManager.objects.filter(document_manager=document_id) - for index_manager in index_managers: - raw_index_id = index_manager.raw_index_id - DocumentIndexingService.remove_document_indexing( - org_id=org_id, user_id=user_id, doc_id_key=raw_index_id - ) - # Delete the document record - document.delete() - # Delete the files - FileManagerHelper.delete_file(file_system, path, file_name) - # Directories to delete the text files - directories = 
["extract/", "extract/metadata/", "summarize/"] - FileManagerHelper.delete_related_files( - file_system, path, file_name, directories - ) - return Response( - {"data": "File deleted succesfully."}, - status=status.HTTP_200_OK, - ) - except Exception as exc: - logger.error(f"Exception thrown from file deletion, error {exc}") - return Response( - {"data": "File deletion failed."}, - status=status.HTTP_400_BAD_REQUEST, - ) - - @action(detail=True, methods=["post"]) - def export_tool(self, request: Request, pk: Any = None) -> Response: - """API Endpoint for exporting required jsons for the custom tool.""" - custom_tool = self.get_object() - serializer = ExportToolRequestSerializer(data=request.data) - serializer.is_valid(raise_exception=True) - is_shared_with_org: bool = serializer.validated_data.get("is_shared_with_org") - user_ids = set(serializer.validated_data.get("user_id")) - - force_export = serializer.validated_data.get("force_export") - PromptStudioRegistryHelper.update_or_create_psr_tool( - custom_tool=custom_tool, - shared_with_org=is_shared_with_org, - user_ids=user_ids, - force_export=force_export, - ) - return Response( - {"message": "Custom tool exported sucessfully."}, - status=status.HTTP_200_OK, - ) - - @action(detail=True, methods=["get"]) - def export_tool_info(self, request: Request, pk: Any = None) -> Response: - custom_tool = self.get_object() - serialized_instances = None - if hasattr(custom_tool, "prompt_studio_registry"): - serialized_instances = PromptStudioRegistryInfoSerializer( - custom_tool.prompt_studio_registry - ).data - - return Response(serialized_instances) - else: - return Response(status=status.HTTP_204_NO_CONTENT) diff --git a/backend/prompt_studio/prompt_studio_core_v2/constants.py b/backend/prompt_studio/prompt_studio_core_v2/constants.py index f5f27f3f1..45329d98e 100644 --- a/backend/prompt_studio/prompt_studio_core_v2/constants.py +++ b/backend/prompt_studio/prompt_studio_core_v2/constants.py @@ -97,7 +97,7 @@ class ToolStudioPromptKeys: FILE_PATH = "file_path" ENABLE_HIGHLIGHT = "enable_highlight" REQUIRED = "required" - + EXECUTION_SOURCE = "execution_source" class FileViewTypes: ORIGINAL = "ORIGINAL" @@ -133,3 +133,15 @@ class DefaultPrompts: "Do not include any explanation in the reply. " "Only include the extracted information in the reply." ) + + +class ExecutionSource(Enum): + """Enum to indicate the source of invocation. + Any new sources can be added to this enum. + This is to indicate the prompt service. 
+ + Args: + Enum (_type_): ide/tool + """ + + IDE = "ide" diff --git a/backend/prompt_studio/prompt_studio_core_v2/prompt_studio_helper.py b/backend/prompt_studio/prompt_studio_core_v2/prompt_studio_helper.py index 06ffc106f..f3f9e8972 100644 --- a/backend/prompt_studio/prompt_studio_core_v2/prompt_studio_helper.py +++ b/backend/prompt_studio/prompt_studio_core_v2/prompt_studio_helper.py @@ -19,7 +19,11 @@ from prompt_studio.prompt_profile_manager_v2.profile_manager_helper import ( ProfileManagerHelper, ) -from prompt_studio.prompt_studio_core_v2.constants import IndexingStatus, LogLevels +from prompt_studio.prompt_studio_core_v2.constants import ( + ExecutionSource, + IndexingStatus, + LogLevels, +) from prompt_studio.prompt_studio_core_v2.constants import ( ToolStudioPromptKeys as TSPKeys, ) @@ -1177,6 +1181,7 @@ def _fetch_single_pass_response( TSPKeys.FILE_HASH: file_hash, TSPKeys.FILE_NAME: doc_name, Common.LOG_EVENTS_ID: StateStore.get(Common.LOG_EVENTS_ID), + TSPKeys.EXECUTION_SOURCE: ExecutionSource.IDE.value, } util = PromptIdeBaseTool(log_level=LogLevel.INFO, org_id=org_id) diff --git a/backend/prompt_studio/prompt_studio_core_v2/serializers.py b/backend/prompt_studio/prompt_studio_core_v2/serializers.py index bd9e32262..d6e79483a 100644 --- a/backend/prompt_studio/prompt_studio_core_v2/serializers.py +++ b/backend/prompt_studio/prompt_studio_core_v2/serializers.py @@ -44,48 +44,71 @@ class Meta: def to_representation(self, instance): # type: ignore data = super().to_representation(instance) + default_profile = None + + # Fetch summarize LLM profile try: - profile_manager = ProfileManager.objects.get( + summarize_profile = ProfileManager.objects.get( prompt_studio_tool=instance, is_summarize_llm=True ) - data[TSKeys.SUMMARIZE_LLM_PROFILE] = profile_manager.profile_id + data[TSKeys.SUMMARIZE_LLM_PROFILE] = summarize_profile.profile_id except ObjectDoesNotExist: logger.info( - "Summarize LLM profile doesnt exist for prompt tool %s", + "Summarize LLM profile doesn't exist for prompt tool %s", str(instance.tool_id), ) + + # Fetch default LLM profile try: - profile_manager = ProfileManager.get_default_llm_profile(instance) - data[TSKeys.DEFAULT_PROFILE] = profile_manager.profile_id + default_profile = ProfileManager.get_default_llm_profile(instance) + data[TSKeys.DEFAULT_PROFILE] = default_profile.profile_id except DefaultProfileError: + # To make it compatible with older projects error suppressed with warning. 
             logger.warning(
-                "Default LLM profile doesnt exist for prompt tool %s",
+                "Default LLM profile doesn't exist for prompt tool %s",
                 str(instance.tool_id),
             )
-        prompt_instance: ToolStudioPrompt = ToolStudioPrompt.objects.filter(
+
+        # Fetch prompt instances
+        prompt_instances: ToolStudioPrompt = ToolStudioPrompt.objects.filter(
             tool_id=data.get(TSKeys.TOOL_ID)
         ).order_by("sequence_number")
-        data[TSKeys.PROMPTS] = []
+
+        if not prompt_instances.exists():
+            data[TSKeys.PROMPTS] = []
+            return data
+
+        # Process prompt instances
         output: list[Any] = []
-        # Appending prompt instances of the tool for FE Processing
-        if prompt_instance.count() != 0:
-            for prompt in prompt_instance:
-                profile_manager_id = prompt.prompt_id
-                if instance.single_pass_extraction_mode:
-                    # use projects default profile
-                    profile_manager_id = profile_manager.profile_id
-                prompt_serializer = ToolStudioPromptSerializer(prompt)
+        for prompt in prompt_instances:
+            prompt_serializer = ToolStudioPromptSerializer(prompt)
+            serialized_data = prompt_serializer.data
+
+            # Determine coverage
+            coverage: list[Any] = []
+            profile_manager_id = prompt.profile_manager
+            if default_profile and instance.single_pass_extraction_mode:
+                profile_manager_id = default_profile.profile_id
+
+            if profile_manager_id:
                 coverage = OutputManagerUtils.get_coverage(
                     data.get(TSKeys.TOOL_ID),
                     profile_manager_id,
                     prompt.prompt_id,
                     instance.single_pass_extraction_mode,
                 )
-                serialized_data = prompt_serializer.data
-                serialized_data["coverage"] = coverage
-                output.append(serialized_data)
-            data[TSKeys.PROMPTS] = output
+            else:
+                logger.info(
+                    "Skipping coverage calculation for prompt %s "
+                    "due to missing profile ID",
+                    str(prompt.prompt_key),
+                )
+
+            # Add coverage to serialized data
+            serialized_data["coverage"] = coverage
+            output.append(serialized_data)
+        data[TSKeys.PROMPTS] = output
         data["created_by_email"] = instance.created_by.email
         return data
diff --git a/backend/prompt_studio/prompt_studio_document_manager/__init__.py b/backend/prompt_studio/prompt_studio_document_manager/__init__.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/backend/prompt_studio/prompt_studio_document_manager/admin.py b/backend/prompt_studio/prompt_studio_document_manager/admin.py
deleted file mode 100644
index a16311f1b..000000000
--- a/backend/prompt_studio/prompt_studio_document_manager/admin.py
+++ /dev/null
@@ -1,5 +0,0 @@
-from django.contrib import admin
-
-from .models import DocumentManager
-
-admin.site.register(DocumentManager)
diff --git a/backend/prompt_studio/prompt_studio_document_manager/apps.py b/backend/prompt_studio/prompt_studio_document_manager/apps.py
deleted file mode 100644
index b89cdd4a2..000000000
--- a/backend/prompt_studio/prompt_studio_document_manager/apps.py
+++ /dev/null
@@ -1,5 +0,0 @@
-from django.apps import AppConfig
-
-
-class PromptStudioDocumentManagerConfig(AppConfig):
-    name = "prompt_studio.prompt_studio_document_manager"
diff --git a/backend/prompt_studio/prompt_studio_document_manager/constants.py b/backend/prompt_studio/prompt_studio_document_manager/constants.py
deleted file mode 100644
index 2ac55ca83..000000000
--- a/backend/prompt_studio/prompt_studio_document_manager/constants.py
+++ /dev/null
@@ -1,4 +0,0 @@
-class PSDMKeys:
-    DOCUMENT_NAME = "document_name"
-    TOOL = "tool"
-    DOCUMENT_ID = "document_id"
diff --git a/backend/prompt_studio/prompt_studio_document_manager/migrations/0001_initial.py b/backend/prompt_studio/prompt_studio_document_manager/migrations/0001_initial.py
deleted file mode 100644
index
4132bf298..000000000 --- a/backend/prompt_studio/prompt_studio_document_manager/migrations/0001_initial.py +++ /dev/null @@ -1,82 +0,0 @@ -# Generated by Django 4.2.1 on 2024-03-10 20:48 - -import uuid - -import django.db.models.deletion -from django.conf import settings -from django.db import migrations, models - - -class Migration(migrations.Migration): - initial = True - - dependencies = [ - migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ( - "prompt_studio_core", - "0007_remove_customtool_default_profile_and_more", - ), - ] - - operations = [ - migrations.CreateModel( - name="DocumentManager", - fields=[ - ("created_at", models.DateTimeField(auto_now_add=True)), - ("modified_at", models.DateTimeField(auto_now=True)), - ( - "document_id", - models.UUIDField( - default=uuid.uuid4, - editable=False, - primary_key=True, - serialize=False, - ), - ), - ( - "document_name", - models.CharField( - db_comment="Field to store the document name", - editable=False, - ), - ), - ( - "created_by", - models.ForeignKey( - blank=True, - editable=False, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - related_name="prompt_document_created_by", - to=settings.AUTH_USER_MODEL, - ), - ), - ( - "modified_by", - models.ForeignKey( - blank=True, - editable=False, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - related_name="prompt_document_modified_by", - to=settings.AUTH_USER_MODEL, - ), - ), - ( - "tool", - models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, - related_name="prompt_document_linked_tool", - to="prompt_studio_core.customtool", - ), - ), - ], - ), - migrations.AddConstraint( - model_name="documentmanager", - constraint=models.UniqueConstraint( - fields=("document_name", "tool"), - name="unique_document_name_tool", - ), - ), - ] diff --git a/backend/prompt_studio/prompt_studio_document_manager/migrations/__init__.py b/backend/prompt_studio/prompt_studio_document_manager/migrations/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/backend/prompt_studio/prompt_studio_document_manager/models.py b/backend/prompt_studio/prompt_studio_document_manager/models.py deleted file mode 100644 index 256d60e45..000000000 --- a/backend/prompt_studio/prompt_studio_document_manager/models.py +++ /dev/null @@ -1,53 +0,0 @@ -import uuid - -from account.models import User -from django.db import models -from prompt_studio.prompt_studio_core.models import CustomTool -from utils.models.base_model import BaseModel - - -class DocumentManager(BaseModel): - """Model to store the document details.""" - - document_id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False) - - document_name = models.CharField( - db_comment="Field to store the document name", - editable=False, - null=False, - blank=False, - ) - - tool = models.ForeignKey( - CustomTool, - on_delete=models.CASCADE, - related_name="prompt_document_linked_tool", - null=False, - blank=False, - ) - - created_by = models.ForeignKey( - User, - on_delete=models.SET_NULL, - related_name="prompt_document_created_by", - null=True, - blank=True, - editable=False, - ) - - modified_by = models.ForeignKey( - User, - on_delete=models.SET_NULL, - related_name="prompt_document_modified_by", - null=True, - blank=True, - editable=False, - ) - - class Meta: - constraints = [ - models.UniqueConstraint( - fields=["document_name", "tool"], - name="unique_document_name_tool", - ), - ] diff --git a/backend/prompt_studio/prompt_studio_document_manager/prompt_studio_document_helper.py 
b/backend/prompt_studio/prompt_studio_document_manager/prompt_studio_document_helper.py deleted file mode 100644 index bf28a285e..000000000 --- a/backend/prompt_studio/prompt_studio_document_manager/prompt_studio_document_helper.py +++ /dev/null @@ -1,24 +0,0 @@ -import logging - -from prompt_studio.prompt_studio_core.models import CustomTool - -from .models import DocumentManager - -logger = logging.getLogger(__name__) - - -class PromptStudioDocumentHelper: - @staticmethod - def create(tool_id: str, document_name: str) -> DocumentManager: - tool: CustomTool = CustomTool.objects.get(pk=tool_id) - document: DocumentManager = DocumentManager.objects.create( - tool=tool, document_name=document_name - ) - logger.info("Successfully created the record") - return document - - @staticmethod - def delete(document_id: str) -> None: - document: DocumentManager = DocumentManager.objects.get(pk=document_id) - document.delete() - logger.info("Successfully deleted the record") diff --git a/backend/prompt_studio/prompt_studio_document_manager/serializers.py b/backend/prompt_studio/prompt_studio_document_manager/serializers.py deleted file mode 100644 index 844500fc8..000000000 --- a/backend/prompt_studio/prompt_studio_document_manager/serializers.py +++ /dev/null @@ -1,21 +0,0 @@ -from typing import Any - -from backend.serializers import AuditSerializer - -from .constants import PSDMKeys -from .models import DocumentManager - - -class PromptStudioDocumentManagerSerializer(AuditSerializer): - class Meta: - model = DocumentManager - fields = "__all__" - - def to_representation(self, instance: DocumentManager) -> dict[str, Any]: - rep: dict[str, str] = super().to_representation(instance) - required_fields = [ - PSDMKeys.DOCUMENT_NAME, - PSDMKeys.TOOL, - PSDMKeys.DOCUMENT_ID, - ] - return {key: rep[key] for key in required_fields if key in rep} diff --git a/backend/prompt_studio/prompt_studio_document_manager/urls.py b/backend/prompt_studio/prompt_studio_document_manager/urls.py deleted file mode 100644 index f9fb9bcd3..000000000 --- a/backend/prompt_studio/prompt_studio_document_manager/urls.py +++ /dev/null @@ -1,24 +0,0 @@ -from django.urls import path -from rest_framework.urlpatterns import format_suffix_patterns - -from .views import PromptStudioDocumentManagerView - -prompt_studio_documents_list = PromptStudioDocumentManagerView.as_view( - {"get": "list", "post": "create"} -) - -prompt_studio_documents_detail = PromptStudioDocumentManagerView.as_view( - { - "get": "retrieve", - } -) - -urlpatterns = format_suffix_patterns( - [ - path( - "prompt-document/", - prompt_studio_documents_list, - name="prompt-studio-documents-list", - ), - ] -) diff --git a/backend/prompt_studio/prompt_studio_document_manager/views.py b/backend/prompt_studio/prompt_studio_document_manager/views.py deleted file mode 100644 index 49b2f0227..000000000 --- a/backend/prompt_studio/prompt_studio_document_manager/views.py +++ /dev/null @@ -1,31 +0,0 @@ -from typing import Optional - -from django.db.models import QuerySet -from prompt_studio.prompt_studio_document_manager.serializers import ( - PromptStudioDocumentManagerSerializer, -) -from prompt_studio.prompt_studio_output_manager.constants import ( - PromptStudioOutputManagerKeys, -) -from rest_framework import viewsets -from rest_framework.versioning import URLPathVersioning -from utils.filtering import FilterHelper - -from .models import DocumentManager - - -class PromptStudioDocumentManagerView(viewsets.ModelViewSet): - versioning_class = URLPathVersioning - queryset = 
DocumentManager.objects.all() - serializer_class = PromptStudioDocumentManagerSerializer - - def get_queryset(self) -> Optional[QuerySet]: - filter_args = FilterHelper.build_filter_args( - self.request, - PromptStudioOutputManagerKeys.TOOL_ID, - ) - queryset = None - if filter_args: - queryset = DocumentManager.objects.filter(**filter_args) - - return queryset diff --git a/backend/prompt_studio/prompt_studio_index_manager/__init__.py b/backend/prompt_studio/prompt_studio_index_manager/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/backend/prompt_studio/prompt_studio_index_manager/admin.py b/backend/prompt_studio/prompt_studio_index_manager/admin.py deleted file mode 100644 index 188d50ffe..000000000 --- a/backend/prompt_studio/prompt_studio_index_manager/admin.py +++ /dev/null @@ -1,5 +0,0 @@ -from django.contrib import admin - -from .models import IndexManager - -admin.site.register(IndexManager) diff --git a/backend/prompt_studio/prompt_studio_index_manager/apps.py b/backend/prompt_studio/prompt_studio_index_manager/apps.py deleted file mode 100644 index 19dc5f79c..000000000 --- a/backend/prompt_studio/prompt_studio_index_manager/apps.py +++ /dev/null @@ -1,5 +0,0 @@ -from django.apps import AppConfig - - -class PromptStudioIndexManagerConfig(AppConfig): - name = "prompt_studio.prompt_studio_index_manager" diff --git a/backend/prompt_studio/prompt_studio_index_manager/constants.py b/backend/prompt_studio/prompt_studio_index_manager/constants.py deleted file mode 100644 index 6cf3f5e5b..000000000 --- a/backend/prompt_studio/prompt_studio_index_manager/constants.py +++ /dev/null @@ -1,3 +0,0 @@ -class IndexManagerKeys: - PROFILE_MANAGER = "profile_manager" - DOCUMENT_MANAGER = "document_manager" diff --git a/backend/prompt_studio/prompt_studio_index_manager/migrations/0001_initial.py b/backend/prompt_studio/prompt_studio_index_manager/migrations/0001_initial.py deleted file mode 100644 index e2a5bb3d6..000000000 --- a/backend/prompt_studio/prompt_studio_index_manager/migrations/0001_initial.py +++ /dev/null @@ -1,107 +0,0 @@ -# Generated by Django 4.2.1 on 2024-03-10 20:48 - -import uuid - -import django.db.models.deletion -from django.conf import settings -from django.db import migrations, models - - -class Migration(migrations.Migration): - initial = True - - dependencies = [ - migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ("prompt_studio_document_manager", "0001_initial"), - ("prompt_profile_manager", "0008_profilemanager_migration"), - ] - - operations = [ - migrations.CreateModel( - name="IndexManager", - fields=[ - ("created_at", models.DateTimeField(auto_now_add=True)), - ("modified_at", models.DateTimeField(auto_now=True)), - ( - "index_manager_id", - models.UUIDField( - default=uuid.uuid4, - editable=False, - primary_key=True, - serialize=False, - ), - ), - ( - "raw_index_id", - models.CharField( - blank=True, - db_comment="Field to store the raw index id", - editable=False, - null=True, - ), - ), - ( - "summarize_index_id", - models.CharField( - blank=True, - db_comment="Field to store the summarize index id", - editable=False, - null=True, - ), - ), - ( - "index_ids_history", - models.JSONField(db_comment="List of index ids", default=list), - ), - ( - "created_by", - models.ForeignKey( - blank=True, - editable=False, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - related_name="prompt_index_manager_created_by", - to=settings.AUTH_USER_MODEL, - ), - ), - ( - "document_manager", - models.ForeignKey( - editable=False, - 
on_delete=django.db.models.deletion.CASCADE, - related_name="index_manager_linked_document", - to="prompt_studio_document_manager.documentmanager", - ), - ), - ( - "modified_by", - models.ForeignKey( - blank=True, - editable=False, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - related_name="prompt_index_manager_modified_by", - to=settings.AUTH_USER_MODEL, - ), - ), - ( - "profile_manager", - models.ForeignKey( - blank=True, - editable=False, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - related_name="index_manager_linked_raw_llm_profile", - to="prompt_profile_manager.profilemanager", - ), - ), - ], - ), - migrations.AddConstraint( - model_name="indexmanager", - constraint=models.UniqueConstraint( - fields=("document_manager", "profile_manager"), - name="unique_document_manager_profile_manager", - ), - ), - ] diff --git a/backend/prompt_studio/prompt_studio_index_manager/migrations/0002_remove_incorrect_index_keys.py b/backend/prompt_studio/prompt_studio_index_manager/migrations/0002_remove_incorrect_index_keys.py deleted file mode 100644 index 205863fb1..000000000 --- a/backend/prompt_studio/prompt_studio_index_manager/migrations/0002_remove_incorrect_index_keys.py +++ /dev/null @@ -1,46 +0,0 @@ -# Generated by Django 4.2.1 on 2024-04-24 14:03 - -import logging -from typing import Any - -from django.db import migrations, models - -logger = logging.getLogger(__name__) - -# This migration flushes all records in IndexManager which have an incorrect index -# key generated from an older implementation -# Refer https://github.com/Zipstack/unstract-sdk/blob/1c61f4325a38e1bd25e207ca2c426971aa7b62cf/src/unstract/sdk/index.py#L322C1-L349C10 # noqa E501 - - -class Migration(migrations.Migration): - - dependencies = [ - ("prompt_studio_index_manager", "0001_initial"), - ] - - def remove_incorrect_index_keys(apps: Any, schema_editor: Any) -> None: - index_manager_model = apps.get_model( - "prompt_studio_index_manager", "IndexManager" - ) - - incorrect_index_pattern = ( - r"[\da-f]{8}-[\da-f]{4}-[\da-f]{4}-[\da-f]{4}-[\da-f]{12}\|" # Tool ID - r"[\da-f]{8}-[\da-f]{4}-[\da-f]{4}-[\da-f]{4}-[\da-f]{12}\|" # Vector DB - r"[\da-f]{8}-[\da-f]{4}-[\da-f]{4}-[\da-f]{4}-[\da-f]{12}\|" # Embedding - r"[\da-f]{8}-[\da-f]{4}-[\da-f]{4}-[\da-f]{4}-[\da-f]{12}\|" # X2Text - r"\d+\|\d+\|" # Chunk size and chunk overlap - r"[\da-f]{64}" # SHA-256 file hash - ) - - incorrect_indexes = index_manager_model.objects.filter( - models.Q(raw_index_id__regex=incorrect_index_pattern) - | models.Q(summarize_index_id__regex=incorrect_index_pattern) - ) - logger.info(f"Deleting index manager records: {incorrect_indexes}") - incorrect_indexes.delete() - - operations = [ - migrations.RunPython( - remove_incorrect_index_keys, reverse_code=migrations.RunPython.noop - ), - ] diff --git a/backend/prompt_studio/prompt_studio_index_manager/migrations/__init__.py b/backend/prompt_studio/prompt_studio_index_manager/migrations/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/backend/prompt_studio/prompt_studio_index_manager/models.py b/backend/prompt_studio/prompt_studio_index_manager/models.py deleted file mode 100644 index 9129b9cd9..000000000 --- a/backend/prompt_studio/prompt_studio_index_manager/models.py +++ /dev/null @@ -1,126 +0,0 @@ -import json -import logging -import uuid - -from account.models import User -from django.db import connection, models -from django.db.models.signals import pre_delete -from django.dispatch import receiver -from 
prompt_studio.prompt_profile_manager.models import ProfileManager -from prompt_studio.prompt_studio_core.prompt_ide_base_tool import PromptIdeBaseTool -from prompt_studio.prompt_studio_document_manager.models import DocumentManager -from unstract.sdk.constants import LogLevel -from unstract.sdk.vector_db import VectorDB -from utils.models.base_model import BaseModel - -logger = logging.getLogger(__name__) - - -class IndexManager(BaseModel): - """Model to store the index details.""" - - index_manager_id = models.UUIDField( - primary_key=True, default=uuid.uuid4, editable=False - ) - - document_manager = models.ForeignKey( - DocumentManager, - on_delete=models.CASCADE, - related_name="index_manager_linked_document", - editable=False, - null=False, - blank=False, - ) - - profile_manager = models.ForeignKey( - ProfileManager, - on_delete=models.SET_NULL, - related_name="index_manager_linked_raw_llm_profile", - editable=False, - null=True, - blank=True, - ) - - raw_index_id = models.CharField( - db_comment="Field to store the raw index id", - editable=False, - null=True, - blank=True, - ) - - summarize_index_id = models.CharField( - db_comment="Field to store the summarize index id", - editable=False, - null=True, - blank=True, - ) - - index_ids_history = models.JSONField( - db_comment="List of index ids", - default=list, - null=False, - blank=False, - ) - - created_by = models.ForeignKey( - User, - on_delete=models.SET_NULL, - related_name="prompt_index_manager_created_by", - null=True, - blank=True, - editable=False, - ) - - modified_by = models.ForeignKey( - User, - on_delete=models.SET_NULL, - related_name="prompt_index_manager_modified_by", - null=True, - blank=True, - editable=False, - ) - - class Meta: - constraints = [ - models.UniqueConstraint( - fields=["document_manager", "profile_manager"], - name="unique_document_manager_profile_manager", - ), - ] - - -def delete_from_vector_db(index_ids_history, vector_db_instance_id): - org_schema = connection.tenant.schema_name - util = PromptIdeBaseTool(log_level=LogLevel.INFO, org_id=org_schema) - vector_db = VectorDB( - tool=util, - adapter_instance_id=vector_db_instance_id, - ) - for index_id in index_ids_history: - logger.debug(f"Deleting from VectorDB - index id: {index_id}") - try: - vector_db.delete(ref_doc_id=index_id) - except Exception as e: - # Log error and continue with the next index id - logger.error(f"Error deleting index: {index_id} - {e}") - - -# Function will be executed every time an instance of IndexManager is deleted. 
-@receiver(pre_delete, sender=IndexManager) -def perform_vector_db_cleanup(sender, instance, **kwargs): - """Signal to perform vector db cleanup.""" - logger.info("Performing vector db cleanup") - logger.debug(f"Document tool id: {instance.document_manager.tool_id}") - try: - # Get the index_ids_history to clean up from the vector db - index_ids_history = json.loads(instance.index_ids_history) - vector_db_instance_id = str(instance.profile_manager.vector_store.id) - delete_from_vector_db(index_ids_history, vector_db_instance_id) - except Exception as e: - logger.warning( - "Error during vector DB cleanup for deleted document " - "in prompt studio tool %s: %s", - instance.document_manager.tool_id, - e, - exc_info=True, # For additional stack trace - ) diff --git a/backend/prompt_studio/prompt_studio_index_manager/prompt_studio_index_helper.py b/backend/prompt_studio/prompt_studio_index_manager/prompt_studio_index_helper.py deleted file mode 100644 index 49cae9141..000000000 --- a/backend/prompt_studio/prompt_studio_index_manager/prompt_studio_index_helper.py +++ /dev/null @@ -1,66 +0,0 @@ -import json -import logging - -from django.db import transaction -from prompt_studio.prompt_profile_manager.models import ProfileManager -from prompt_studio.prompt_studio_core.exceptions import IndexingAPIError -from prompt_studio.prompt_studio_document_manager.models import DocumentManager - -from .models import IndexManager - -logger = logging.getLogger(__name__) - - -class PromptStudioIndexHelper: - @staticmethod - def handle_index_manager( - document_id: str, - is_summary: bool, - profile_manager: ProfileManager, - doc_id: str, - ) -> IndexManager: - try: - - with transaction.atomic(): - - document: DocumentManager = DocumentManager.objects.get(pk=document_id) - - index_id = "raw_index_id" - if is_summary: - index_id = "summarize_index_id" - - args: dict[str, str] = dict() - args["document_manager"] = document - args["profile_manager"] = profile_manager - - # Create or get the existing record for this document and - # profile combo - index_manager, success = IndexManager.objects.get_or_create(**args) - - if success: - logger.info( - f"Index manager doc_id: {doc_id} for " - f"profile {profile_manager.profile_id} created" - ) - else: - logger.info( - f"Index manager doc_id: {doc_id} for " - f"profile {profile_manager.profile_id} updated" - ) - - index_ids = index_manager.index_ids_history - index_ids_list = json.loads(index_ids) if index_ids else [] - if doc_id not in index_ids: - index_ids_list.append(doc_id) - - args[index_id] = doc_id - args["index_ids_history"] = json.dumps(index_ids_list) - - # Update the record with the index id - result: IndexManager = IndexManager.objects.filter( - index_manager_id=index_manager.index_manager_id - ).update(**args) - return result - except Exception as e: - transaction.rollback() - raise IndexingAPIError("Error updating indexing status") from e diff --git a/backend/prompt_studio/prompt_studio_index_manager/serializers.py b/backend/prompt_studio/prompt_studio_index_manager/serializers.py deleted file mode 100644 index ae33a186e..000000000 --- a/backend/prompt_studio/prompt_studio_index_manager/serializers.py +++ /dev/null @@ -1,9 +0,0 @@ -from backend.serializers import AuditSerializer - -from .models import IndexManager - - -class IndexManagerSerializer(AuditSerializer): - class Meta: - model = IndexManager - fields = "__all__" diff --git a/backend/prompt_studio/prompt_studio_index_manager/urls.py b/backend/prompt_studio/prompt_studio_index_manager/urls.py 
deleted file mode 100644 index b082bc1d8..000000000 --- a/backend/prompt_studio/prompt_studio_index_manager/urls.py +++ /dev/null @@ -1,22 +0,0 @@ -from django.urls import path -from rest_framework.urlpatterns import format_suffix_patterns - -from .views import IndexManagerView - -prompt_studio_index_list = IndexManagerView.as_view({"get": "list", "post": "create"}) - -prompt_studio_index_detail = IndexManagerView.as_view( - { - "get": "retrieve", - } -) - -urlpatterns = format_suffix_patterns( - [ - path( - "document-index/", - prompt_studio_index_list, - name="prompt-studio-documents-list", - ), - ] -) diff --git a/backend/prompt_studio/prompt_studio_index_manager/views.py b/backend/prompt_studio/prompt_studio_index_manager/views.py deleted file mode 100644 index 414db28cd..000000000 --- a/backend/prompt_studio/prompt_studio_index_manager/views.py +++ /dev/null @@ -1,27 +0,0 @@ -from typing import Optional - -from django.db.models import QuerySet -from prompt_studio.prompt_studio_index_manager.constants import IndexManagerKeys -from prompt_studio.prompt_studio_index_manager.serializers import IndexManagerSerializer -from rest_framework import viewsets -from rest_framework.versioning import URLPathVersioning -from utils.filtering import FilterHelper - -from .models import IndexManager - - -class IndexManagerView(viewsets.ModelViewSet): - versioning_class = URLPathVersioning - queryset = IndexManager.objects.all() - serializer_class = IndexManagerSerializer - - def get_queryset(self) -> Optional[QuerySet]: - filter_args = FilterHelper.build_filter_args( - self.request, - IndexManagerKeys.PROFILE_MANAGER, - IndexManagerKeys.DOCUMENT_MANAGER, - ) - queryset = None - if filter_args: - queryset = IndexManager.objects.filter(**filter_args) - return queryset diff --git a/backend/prompt_studio/prompt_studio_output_manager/__init__.py b/backend/prompt_studio/prompt_studio_output_manager/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/backend/prompt_studio/prompt_studio_output_manager/admin.py b/backend/prompt_studio/prompt_studio_output_manager/admin.py deleted file mode 100644 index 00fd98264..000000000 --- a/backend/prompt_studio/prompt_studio_output_manager/admin.py +++ /dev/null @@ -1,5 +0,0 @@ -from django.contrib import admin - -from .models import PromptStudioOutputManager - -admin.site.register(PromptStudioOutputManager) diff --git a/backend/prompt_studio/prompt_studio_output_manager/apps.py b/backend/prompt_studio/prompt_studio_output_manager/apps.py deleted file mode 100644 index 055b16d8d..000000000 --- a/backend/prompt_studio/prompt_studio_output_manager/apps.py +++ /dev/null @@ -1,5 +0,0 @@ -from django.apps import AppConfig - - -class PromptStudioOutputManager(AppConfig): - name = "prompt_studio.prompt_studio_output_manager" diff --git a/backend/prompt_studio/prompt_studio_output_manager/constants.py b/backend/prompt_studio/prompt_studio_output_manager/constants.py deleted file mode 100644 index 1cee0b394..000000000 --- a/backend/prompt_studio/prompt_studio_output_manager/constants.py +++ /dev/null @@ -1,12 +0,0 @@ -class PromptStudioOutputManagerKeys: - TOOL_ID = "tool_id" - PROMPT_ID = "prompt_id" - PROFILE_MANAGER = "profile_manager" - DOCUMENT_MANAGER = "document_manager" - IS_SINGLE_PASS_EXTRACT = "is_single_pass_extract" - NOTES = "NOTES" - - -class PromptOutputManagerErrorMessage: - TOOL_VALIDATION = "tool_id parameter is required" - TOOL_NOT_FOUND = "Tool not found" diff --git a/backend/prompt_studio/prompt_studio_output_manager/exceptions.py 
b/backend/prompt_studio/prompt_studio_output_manager/exceptions.py deleted file mode 100644 index f11530914..000000000 --- a/backend/prompt_studio/prompt_studio_output_manager/exceptions.py +++ /dev/null @@ -1,6 +0,0 @@ -from rest_framework.exceptions import APIException - - -class InternalError(APIException): - status_code = 400 - default_detail = "Internal service error." diff --git a/backend/prompt_studio/prompt_studio_output_manager/migrations/0001_initial.py b/backend/prompt_studio/prompt_studio_output_manager/migrations/0001_initial.py deleted file mode 100644 index b0eaae9ae..000000000 --- a/backend/prompt_studio/prompt_studio_output_manager/migrations/0001_initial.py +++ /dev/null @@ -1,99 +0,0 @@ -# Generated by Django 4.2.1 on 2024-02-07 11:20 - -import uuid - -import django.db.models.deletion -from django.conf import settings -from django.db import migrations, models - - -class Migration(migrations.Migration): - initial = True - - dependencies = [ - ("prompt_studio_core", "0003_merge_20240125_1501"), - ("prompt_studio", "0003_remove_toolstudioprompt_updated_at_and_more"), - ("prompt_profile_manager", "0003_merge_20240125_0530"), - migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ] - - operations = [ - migrations.CreateModel( - name="PromptStudioOutputManager", - fields=[ - ("created_at", models.DateTimeField(auto_now_add=True)), - ("modified_at", models.DateTimeField(auto_now=True)), - ( - "prompt_output_id", - models.UUIDField( - default=uuid.uuid4, - editable=False, - primary_key=True, - serialize=False, - ), - ), - ( - "output", - models.CharField(db_comment="Field to store output"), - ), - ( - "created_by", - models.ForeignKey( - blank=True, - editable=False, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - related_name="prompt_output_created_by", - to=settings.AUTH_USER_MODEL, - ), - ), - ( - "modified_by", - models.ForeignKey( - blank=True, - editable=False, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - related_name="prompt_output_modified_by", - to=settings.AUTH_USER_MODEL, - ), - ), - ( - "profile_manager", - models.ForeignKey( - blank=True, - editable=False, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - related_name="prompt_output_linked_prompt", - to="prompt_profile_manager.profilemanager", - ), - ), - ( - "prompt_id", - models.ForeignKey( - blank=True, - editable=False, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - related_name="prompt_output_linked_prompt", - to="prompt_studio.toolstudioprompt", - ), - ), - ( - "tool_id", - models.ForeignKey( - blank=True, - editable=False, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - related_name="prompt_ouput_linked_tool", - to="prompt_studio_core.customtool", - ), - ), - ], - options={ - "abstract": False, - }, - ), - ] diff --git a/backend/prompt_studio/prompt_studio_output_manager/migrations/0002_promptstudiooutputmanager_doc_name.py b/backend/prompt_studio/prompt_studio_output_manager/migrations/0002_promptstudiooutputmanager_doc_name.py deleted file mode 100644 index 527d0b452..000000000 --- a/backend/prompt_studio/prompt_studio_output_manager/migrations/0002_promptstudiooutputmanager_doc_name.py +++ /dev/null @@ -1,22 +0,0 @@ -# Generated by Django 4.2.1 on 2024-02-07 19:42 - -from django.db import migrations, models - - -class Migration(migrations.Migration): - dependencies = [ - ("prompt_studio_output_manager", "0001_initial"), - ] - - operations = [ - migrations.AddField( - model_name="promptstudiooutputmanager", - 
name="doc_name", - field=models.CharField( - blank=True, - db_comment="Field to store the document name", - editable=False, - null=True, - ), - ), - ] diff --git a/backend/prompt_studio/prompt_studio_output_manager/migrations/0003_alter_promptstudiooutputmanager_doc_name.py b/backend/prompt_studio/prompt_studio_output_manager/migrations/0003_alter_promptstudiooutputmanager_doc_name.py deleted file mode 100644 index 69f2c5a15..000000000 --- a/backend/prompt_studio/prompt_studio_output_manager/migrations/0003_alter_promptstudiooutputmanager_doc_name.py +++ /dev/null @@ -1,26 +0,0 @@ -# Generated by Django 4.2.1 on 2024-02-07 19:53 - -import django.utils.timezone -from django.db import migrations, models - - -class Migration(migrations.Migration): - dependencies = [ - ( - "prompt_studio_output_manager", - "0002_promptstudiooutputmanager_doc_name", - ), - ] - - operations = [ - migrations.AlterField( - model_name="promptstudiooutputmanager", - name="doc_name", - field=models.CharField( - db_comment="Field to store the document name", - default=django.utils.timezone.now, - editable=False, - ), - preserve_default=False, - ), - ] diff --git a/backend/prompt_studio/prompt_studio_output_manager/migrations/0004_alter_promptstudiooutputmanager_doc_name.py b/backend/prompt_studio/prompt_studio_output_manager/migrations/0004_alter_promptstudiooutputmanager_doc_name.py deleted file mode 100644 index b51b80233..000000000 --- a/backend/prompt_studio/prompt_studio_output_manager/migrations/0004_alter_promptstudiooutputmanager_doc_name.py +++ /dev/null @@ -1,20 +0,0 @@ -# Generated by Django 4.2.1 on 2024-02-07 20:50 - -from django.db import migrations, models - - -class Migration(migrations.Migration): - dependencies = [ - ( - "prompt_studio_output_manager", - "0003_alter_promptstudiooutputmanager_doc_name", - ), - ] - - operations = [ - migrations.AlterField( - model_name="promptstudiooutputmanager", - name="doc_name", - field=models.CharField(db_comment="Field to store the document name"), - ), - ] diff --git a/backend/prompt_studio/prompt_studio_output_manager/migrations/0005_alter_promptstudiooutputmanager_profile_manager_and_more.py b/backend/prompt_studio/prompt_studio_output_manager/migrations/0005_alter_promptstudiooutputmanager_profile_manager_and_more.py deleted file mode 100644 index 2274060e5..000000000 --- a/backend/prompt_studio/prompt_studio_output_manager/migrations/0005_alter_promptstudiooutputmanager_profile_manager_and_more.py +++ /dev/null @@ -1,52 +0,0 @@ -# Generated by Django 4.2.1 on 2024-02-07 20:53 - -import django.db.models.deletion -from django.db import migrations, models - - -class Migration(migrations.Migration): - dependencies = [ - ("prompt_studio_core", "0003_merge_20240125_1501"), - ("prompt_studio", "0003_remove_toolstudioprompt_updated_at_and_more"), - ("prompt_profile_manager", "0003_merge_20240125_0530"), - ( - "prompt_studio_output_manager", - "0004_alter_promptstudiooutputmanager_doc_name", - ), - ] - - operations = [ - migrations.AlterField( - model_name="promptstudiooutputmanager", - name="profile_manager", - field=models.ForeignKey( - blank=True, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - related_name="prompt_output_linked_prompt", - to="prompt_profile_manager.profilemanager", - ), - ), - migrations.AlterField( - model_name="promptstudiooutputmanager", - name="prompt_id", - field=models.ForeignKey( - blank=True, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - related_name="prompt_output_linked_prompt", - 
to="prompt_studio.toolstudioprompt", - ), - ), - migrations.AlterField( - model_name="promptstudiooutputmanager", - name="tool_id", - field=models.ForeignKey( - blank=True, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - related_name="prompt_ouput_linked_tool", - to="prompt_studio_core.customtool", - ), - ), - ] diff --git a/backend/prompt_studio/prompt_studio_output_manager/migrations/0006_alter_promptstudiooutputmanager_output.py b/backend/prompt_studio/prompt_studio_output_manager/migrations/0006_alter_promptstudiooutputmanager_output.py deleted file mode 100644 index ca898210f..000000000 --- a/backend/prompt_studio/prompt_studio_output_manager/migrations/0006_alter_promptstudiooutputmanager_output.py +++ /dev/null @@ -1,22 +0,0 @@ -# Generated by Django 4.2.1 on 2024-02-13 12:14 - -from django.db import migrations, models - - -class Migration(migrations.Migration): - dependencies = [ - ( - "prompt_studio_output_manager", - "0005_alter_promptstudiooutputmanager_profile_manager_and_more", - ), - ] - - operations = [ - migrations.AlterField( - model_name="promptstudiooutputmanager", - name="output", - field=models.CharField( - blank=True, db_comment="Field to store output", null=True - ), - ), - ] diff --git a/backend/prompt_studio/prompt_studio_output_manager/migrations/0007_promptstudiooutputmanager_eval_metrics.py b/backend/prompt_studio/prompt_studio_output_manager/migrations/0007_promptstudiooutputmanager_eval_metrics.py deleted file mode 100644 index b57a9bc6a..000000000 --- a/backend/prompt_studio/prompt_studio_output_manager/migrations/0007_promptstudiooutputmanager_eval_metrics.py +++ /dev/null @@ -1,24 +0,0 @@ -# Generated by Django 4.2.1 on 2024-02-27 05:42 - -from django.db import migrations, models - - -class Migration(migrations.Migration): - dependencies = [ - ( - "prompt_studio_output_manager", - "0006_alter_promptstudiooutputmanager_output", - ), - ] - - operations = [ - migrations.AddField( - model_name="promptstudiooutputmanager", - name="eval_metrics", - field=models.JSONField( - db_column="eval_metrics", - db_comment="Field to store the evaluation metrics", - default=list, - ), - ), - ] diff --git a/backend/prompt_studio/prompt_studio_output_manager/migrations/0008_delete_existing_rows.py b/backend/prompt_studio/prompt_studio_output_manager/migrations/0008_delete_existing_rows.py deleted file mode 100644 index 0ac7584a9..000000000 --- a/backend/prompt_studio/prompt_studio_output_manager/migrations/0008_delete_existing_rows.py +++ /dev/null @@ -1,25 +0,0 @@ -# Generated by Django 4.2.1 on 2024-03-11 05:20 - -from django.db import migrations - - -def delete_existing_rows(apps, _): - promptstudiooutputmanager = apps.get_model( - "prompt_studio_output_manager", "promptstudiooutputmanager" - ) - promptstudiooutputmanager.objects.all().delete() - - -class Migration(migrations.Migration): - dependencies = [ - ( - "prompt_studio_output_manager", - "0007_promptstudiooutputmanager_eval_metrics", - ), - ] - - operations = [ - migrations.RunPython( - delete_existing_rows, reverse_code=migrations.RunPython.noop - ), - ] diff --git a/backend/prompt_studio/prompt_studio_output_manager/migrations/0009_remove_promptstudiooutputmanager_doc_name_and_more.py b/backend/prompt_studio/prompt_studio_output_manager/migrations/0009_remove_promptstudiooutputmanager_doc_name_and_more.py deleted file mode 100644 index 8a6000942..000000000 --- a/backend/prompt_studio/prompt_studio_output_manager/migrations/0009_remove_promptstudiooutputmanager_doc_name_and_more.py +++ /dev/null @@ 
-1,68 +0,0 @@ -# Generated by Django 4.2.1 on 2024-03-11 06:07 - -import django.db.models.deletion -from django.db import migrations, models - - -class Migration(migrations.Migration): - dependencies = [ - ("prompt_studio_document_manager", "0001_initial"), - ( - "prompt_studio_core", - "0007_remove_customtool_default_profile_and_more", - ), - ("prompt_profile_manager", "0008_profilemanager_migration"), - ("prompt_studio", "0006_alter_toolstudioprompt_prompt_key_and_more"), - ("prompt_studio_output_manager", "0008_delete_existing_rows"), - ] - - operations = [ - migrations.RemoveField( - model_name="promptstudiooutputmanager", - name="doc_name", - ), - migrations.AddField( - model_name="promptstudiooutputmanager", - name="document_manager", - field=models.ForeignKey( - default=None, - on_delete=django.db.models.deletion.CASCADE, - related_name="prompt_output_linked_document_manager", - to="prompt_studio_document_manager.documentmanager", - ), - preserve_default=False, - ), - migrations.AlterField( - model_name="promptstudiooutputmanager", - name="profile_manager", - field=models.ForeignKey( - blank=True, - null=True, - on_delete=django.db.models.deletion.CASCADE, - related_name="prompt_output_linked_prompt", - to="prompt_profile_manager.profilemanager", - ), - ), - migrations.AlterField( - model_name="promptstudiooutputmanager", - name="prompt_id", - field=models.ForeignKey( - blank=True, - null=True, - on_delete=django.db.models.deletion.CASCADE, - related_name="prompt_output_linked_prompt", - to="prompt_studio.toolstudioprompt", - ), - ), - migrations.AlterField( - model_name="promptstudiooutputmanager", - name="tool_id", - field=models.ForeignKey( - blank=True, - null=True, - on_delete=django.db.models.deletion.CASCADE, - related_name="prompt_ouput_linked_tool", - to="prompt_studio_core.customtool", - ), - ), - ] diff --git a/backend/prompt_studio/prompt_studio_output_manager/migrations/0010_delete_duplicate_rows.py b/backend/prompt_studio/prompt_studio_output_manager/migrations/0010_delete_duplicate_rows.py deleted file mode 100644 index 14a63fe89..000000000 --- a/backend/prompt_studio/prompt_studio_output_manager/migrations/0010_delete_duplicate_rows.py +++ /dev/null @@ -1,58 +0,0 @@ -# Generated by Django 4.2.1 on 2024-03-22 11:19 - -from django.db import migrations, models - - -def delete_duplicates_and_nulls(apps, schema_editor): - prompt_studio_output_manager = apps.get_model( - "prompt_studio_output_manager", "PromptStudioOutputManager" - ) - - # Delete rows where prompt_id, document_manager, profile_manager, or tool_id is NULL - prompt_studio_output_manager.objects.filter( - models.Q(prompt_id=None) - | models.Q(document_manager=None) - | models.Q(profile_manager=None) - | models.Q(tool_id=None) - ).delete() - - # Find duplicate rows based on unique constraint fields and count their occurrences - duplicates = ( - prompt_studio_output_manager.objects.values( - "prompt_id", "document_manager", "profile_manager", "tool_id" - ) - .annotate(count=models.Count("prompt_output_id")) - .filter(count__gt=1) # Filter to only get rows that have duplicates - ) - - # Iterate over each set of duplicates found - for duplicate in duplicates: - # Find all instances of duplicates for the current set - pks = ( - prompt_studio_output_manager.objects.filter( - prompt_id=duplicate["prompt_id"], - document_manager=duplicate["document_manager"], - profile_manager=duplicate["profile_manager"], - tool_id=duplicate["tool_id"], - ) - .order_by("-created_at") - .values_list("pk")[1:] - ) # Order by 
created_at descending and skip the first one (keep the latest) - - # Delete the duplicate rows - prompt_studio_output_manager.objects.filter(pk__in=pks).delete() - - -class Migration(migrations.Migration): - dependencies = [ - ( - "prompt_studio_output_manager", - "0009_remove_promptstudiooutputmanager_doc_name_and_more", - ), - ] - - operations = [ - migrations.RunPython( - delete_duplicates_and_nulls, reverse_code=migrations.RunPython.noop - ), - ] diff --git a/backend/prompt_studio/prompt_studio_output_manager/migrations/0011_promptstudiooutputmanager_is_single_pass_extract_and_more.py b/backend/prompt_studio/prompt_studio_output_manager/migrations/0011_promptstudiooutputmanager_is_single_pass_extract_and_more.py deleted file mode 100644 index adae6726e..000000000 --- a/backend/prompt_studio/prompt_studio_output_manager/migrations/0011_promptstudiooutputmanager_is_single_pass_extract_and_more.py +++ /dev/null @@ -1,70 +0,0 @@ -# Generated by Django 4.2.1 on 2024-03-23 08:12 - -import django.db.models.deletion -from django.db import migrations, models - - -class Migration(migrations.Migration): - dependencies = [ - ( - "prompt_studio_core", - "0008_customtool_exclude_failed_customtool_monitor_llm", - ), - ( - "prompt_profile_manager", - "0009_alter_profilemanager_prompt_studio_tool", - ), - ("prompt_studio", "0006_alter_toolstudioprompt_prompt_key_and_more"), - ("prompt_studio_output_manager", "0010_delete_duplicate_rows"), - ] - - operations = [ - migrations.AddField( - model_name="promptstudiooutputmanager", - name="is_single_pass_extract", - field=models.BooleanField( - db_comment="Is the single pass extraction mode active", - default=False, - ), - ), - migrations.AlterField( - model_name="promptstudiooutputmanager", - name="profile_manager", - field=models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, - related_name="prompt_output_linked_prompt", - to="prompt_profile_manager.profilemanager", - ), - ), - migrations.AlterField( - model_name="promptstudiooutputmanager", - name="prompt_id", - field=models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, - related_name="prompt_output_linked_prompt", - to="prompt_studio.toolstudioprompt", - ), - ), - migrations.AlterField( - model_name="promptstudiooutputmanager", - name="tool_id", - field=models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, - related_name="prompt_ouput_linked_tool", - to="prompt_studio_core.customtool", - ), - ), - migrations.AddConstraint( - model_name="promptstudiooutputmanager", - constraint=models.UniqueConstraint( - fields=( - "prompt_id", - "document_manager", - "profile_manager", - "tool_id", - "is_single_pass_extract", - ), - name="unique_prompt_output", - ), - ), - ] diff --git a/backend/prompt_studio/prompt_studio_output_manager/migrations/0012_promptstudiooutputmanager_run_id.py b/backend/prompt_studio/prompt_studio_output_manager/migrations/0012_promptstudiooutputmanager_run_id.py deleted file mode 100644 index 3f0c9c435..000000000 --- a/backend/prompt_studio/prompt_studio_output_manager/migrations/0012_promptstudiooutputmanager_run_id.py +++ /dev/null @@ -1,23 +0,0 @@ -# Generated by Django 4.2.1 on 2024-05-14 11:03 - -import uuid - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ( - "prompt_studio_output_manager", - "0011_promptstudiooutputmanager_is_single_pass_extract_and_more", - ), - ] - - operations = [ - migrations.AddField( - model_name="promptstudiooutputmanager", - name="run_id", - 
field=models.UUIDField(default=uuid.uuid4, editable=False), - ), - ] diff --git a/backend/prompt_studio/prompt_studio_output_manager/migrations/0013_promptstudiooutputmanager_context.py b/backend/prompt_studio/prompt_studio_output_manager/migrations/0013_promptstudiooutputmanager_context.py deleted file mode 100644 index 9d72dbd4d..000000000 --- a/backend/prompt_studio/prompt_studio_output_manager/migrations/0013_promptstudiooutputmanager_context.py +++ /dev/null @@ -1,20 +0,0 @@ -# Generated by Django 4.2.1 on 2024-06-27 18:27 - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("prompt_studio_output_manager", "0012_promptstudiooutputmanager_run_id"), - ] - - operations = [ - migrations.AddField( - model_name="promptstudiooutputmanager", - name="context", - field=models.CharField( - blank=True, db_comment="Field to store chucks used", null=True - ), - ), - ] diff --git a/backend/prompt_studio/prompt_studio_output_manager/migrations/0014_alter_promptstudiooutputmanager_context.py b/backend/prompt_studio/prompt_studio_output_manager/migrations/0014_alter_promptstudiooutputmanager_context.py deleted file mode 100644 index 9d7844eaa..000000000 --- a/backend/prompt_studio/prompt_studio_output_manager/migrations/0014_alter_promptstudiooutputmanager_context.py +++ /dev/null @@ -1,20 +0,0 @@ -# Generated by Django 4.2.1 on 2024-06-30 17:17 - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("prompt_studio_output_manager", "0013_promptstudiooutputmanager_context"), - ] - - operations = [ - migrations.AlterField( - model_name="promptstudiooutputmanager", - name="context", - field=models.TextField( - blank=True, db_comment="Field to store chunks used", null=True - ), - ), - ] diff --git a/backend/prompt_studio/prompt_studio_output_manager/migrations/0015_promptstudiooutputmanager_challenge_data.py b/backend/prompt_studio/prompt_studio_output_manager/migrations/0015_promptstudiooutputmanager_challenge_data.py deleted file mode 100644 index 0f3987c13..000000000 --- a/backend/prompt_studio/prompt_studio_output_manager/migrations/0015_promptstudiooutputmanager_challenge_data.py +++ /dev/null @@ -1,23 +0,0 @@ -# Generated by Django 4.2.1 on 2024-08-21 00:59 - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ( - "prompt_studio_output_manager", - "0014_alter_promptstudiooutputmanager_context", - ), - ] - - operations = [ - migrations.AddField( - model_name="promptstudiooutputmanager", - name="challenge_data", - field=models.JSONField( - blank=True, db_comment="Field to store challenge data", null=True - ), - ), - ] diff --git a/backend/prompt_studio/prompt_studio_output_manager/migrations/__init__.py b/backend/prompt_studio/prompt_studio_output_manager/migrations/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/backend/prompt_studio/prompt_studio_output_manager/models.py b/backend/prompt_studio/prompt_studio_output_manager/models.py deleted file mode 100644 index ac7e17178..000000000 --- a/backend/prompt_studio/prompt_studio_output_manager/models.py +++ /dev/null @@ -1,91 +0,0 @@ -import uuid - -from account.models import User -from django.db import models -from prompt_studio.prompt_profile_manager.models import ProfileManager -from prompt_studio.prompt_studio.models import ToolStudioPrompt -from prompt_studio.prompt_studio_core.models import CustomTool -from 
prompt_studio.prompt_studio_document_manager.models import DocumentManager -from utils.models.base_model import BaseModel - - -class PromptStudioOutputManager(BaseModel): - """Data model to handle output persisitance for Project. - - By default the tools will be added to private tool hub. - """ - - prompt_output_id = models.UUIDField( - primary_key=True, default=uuid.uuid4, editable=False - ) - output = models.CharField( - db_comment="Field to store output", editable=True, null=True, blank=True - ) - context = models.TextField( - db_comment="Field to store chunks used", editable=True, null=True, blank=True - ) - challenge_data = models.JSONField( - db_comment="Field to store challenge data", editable=True, null=True, blank=True - ) - eval_metrics = models.JSONField( - db_column="eval_metrics", - null=False, - blank=False, - default=list, - db_comment="Field to store the evaluation metrics", - ) - is_single_pass_extract = models.BooleanField( - default=False, - db_comment="Is the single pass extraction mode active", - ) - prompt_id = models.ForeignKey( - ToolStudioPrompt, - on_delete=models.CASCADE, - related_name="prompt_output_linked_prompt", - ) - document_manager = models.ForeignKey( - DocumentManager, - on_delete=models.CASCADE, - related_name="prompt_output_linked_document_manager", - ) - profile_manager = models.ForeignKey( - ProfileManager, - on_delete=models.CASCADE, - related_name="prompt_output_linked_prompt", - ) - tool_id = models.ForeignKey( - CustomTool, - on_delete=models.CASCADE, - related_name="prompt_ouput_linked_tool", - ) - run_id = models.UUIDField(default=uuid.uuid4, editable=False) - created_by = models.ForeignKey( - User, - on_delete=models.SET_NULL, - related_name="prompt_output_created_by", - null=True, - blank=True, - editable=False, - ) - modified_by = models.ForeignKey( - User, - on_delete=models.SET_NULL, - related_name="prompt_output_modified_by", - null=True, - blank=True, - editable=False, - ) - - class Meta: - constraints = [ - models.UniqueConstraint( - fields=[ - "prompt_id", - "document_manager", - "profile_manager", - "tool_id", - "is_single_pass_extract", - ], - name="unique_prompt_output", - ), - ] diff --git a/backend/prompt_studio/prompt_studio_output_manager/output_manager_helper.py b/backend/prompt_studio/prompt_studio_output_manager/output_manager_helper.py deleted file mode 100644 index c65ea94ea..000000000 --- a/backend/prompt_studio/prompt_studio_output_manager/output_manager_helper.py +++ /dev/null @@ -1,234 +0,0 @@ -import json -import logging -from typing import Any, Optional - -from django.core.exceptions import ObjectDoesNotExist -from prompt_studio.prompt_profile_manager.models import ProfileManager -from prompt_studio.prompt_studio.models import ToolStudioPrompt -from prompt_studio.prompt_studio_core.exceptions import ( - AnswerFetchError, - DefaultProfileError, -) -from prompt_studio.prompt_studio_core.models import CustomTool -from prompt_studio.prompt_studio_document_manager.models import DocumentManager -from prompt_studio.prompt_studio_output_manager.constants import ( - PromptStudioOutputManagerKeys as PSOMKeys, -) -from prompt_studio.prompt_studio_output_manager.models import PromptStudioOutputManager -from prompt_studio.prompt_studio_output_manager.serializers import ( - PromptStudioOutputSerializer, -) - -logger = logging.getLogger(__name__) - - -class OutputManagerHelper: - @staticmethod - def handle_prompt_output_update( - run_id: str, - prompts: list[ToolStudioPrompt], - outputs: Any, - document_id: str, - 
is_single_pass_extract: bool, - metadata: dict[str, Any], - profile_manager_id: Optional[str] = None, - ) -> list[dict[str, Any]]: - """Handles updating prompt outputs in the database and returns - serialized data. - - Args: - run_id (str): ID of the run. - prompts (list[ToolStudioPrompt]): List of prompts to update. - outputs (Any): Outputs corresponding to the prompts. - document_id (str): ID of the document. - profile_manager_id (Optional[str]): UUID of the profile manager. - is_single_pass_extract (bool): Flag indicating if single pass - extract is active. - metadata (dict[str, Any]): Metadata for the update. - - Returns: - list[dict[str, Any]]: List of serialized prompt output data. - """ - - def update_or_create_prompt_output( - prompt: ToolStudioPrompt, - profile_manager: ProfileManager, - output: str, - eval_metrics: list[Any], - tool: CustomTool, - context: str, - challenge_data: Optional[dict[str, Any]], - ) -> PromptStudioOutputManager: - """Handles creating or updating a single prompt output and returns - the instance.""" - try: - prompt_output, success = ( - PromptStudioOutputManager.objects.get_or_create( - document_manager=document_manager, - tool_id=tool, - profile_manager=profile_manager, - prompt_id=prompt, - is_single_pass_extract=is_single_pass_extract, - defaults={ - "output": output, - "eval_metrics": eval_metrics, - "context": context, - "challenge_data": challenge_data, - }, - ) - ) - - if success: - logger.info( - f"Created record for prompt_id: {prompt.prompt_id} and " - f"profile {profile_manager.profile_id}" - ) - else: - logger.info( - f"Updated record for prompt_id: {prompt.prompt_id} and " - f"profile {profile_manager.profile_id}" - ) - - args: dict[str, Any] = { - "run_id": run_id, - "output": output, - "eval_metrics": eval_metrics, - "context": context, - "challenge_data": challenge_data, - } - PromptStudioOutputManager.objects.filter( - document_manager=document_manager, - tool_id=tool, - profile_manager=profile_manager, - prompt_id=prompt, - is_single_pass_extract=is_single_pass_extract, - ).update(**args) - - # Refresh the prompt_output instance to get updated values - prompt_output.refresh_from_db() - - return prompt_output - - except Exception as e: - raise AnswerFetchError(f"Error updating prompt output {e}") from e - - # List to store serialized results - serialized_data: list[dict[str, Any]] = [] - context = metadata.get("context") - challenge_data = metadata.get("challenge_data") - - if not prompts: - return serialized_data - - tool = prompts[0].tool_id - default_profile = OutputManagerHelper.get_default_profile( - profile_manager_id, tool - ) - document_manager = DocumentManager.objects.get(pk=document_id) - - for prompt in prompts: - if prompt.prompt_type == PSOMKeys.NOTES: - continue - - if not is_single_pass_extract: - context = context.get(prompt.prompt_key) - if challenge_data: - challenge_data = challenge_data.get(prompt.prompt_key) - - if challenge_data: - challenge_data["file_name"] = metadata.get("file_name") - - output = outputs.get(prompt.prompt_key) - # TODO: use enums here - if prompt.enforce_type in {"json", "table", "record"}: - output = json.dumps(output) - profile_manager = default_profile - eval_metrics = outputs.get(f"{prompt.prompt_key}__evaluation", []) - - # Update or create the prompt output - prompt_output = update_or_create_prompt_output( - prompt=prompt, - profile_manager=profile_manager, - output=output, - eval_metrics=eval_metrics, - tool=tool, - context=json.dumps(context), - challenge_data=challenge_data, - ) - - # 
Serialize the instance - serializer = PromptStudioOutputSerializer(prompt_output) - serialized_data.append(serializer.data) - - return serialized_data - - @staticmethod - def get_default_profile( - profile_manager_id: Optional[str], tool: CustomTool - ) -> ProfileManager: - if profile_manager_id: - return OutputManagerHelper.fetch_profile_manager(profile_manager_id) - else: - return OutputManagerHelper.fetch_default_llm_profile(tool) - - @staticmethod - def fetch_profile_manager(profile_manager_id: str) -> ProfileManager: - try: - return ProfileManager.objects.get(profile_id=profile_manager_id) - except ProfileManager.DoesNotExist: - raise DefaultProfileError( - f"ProfileManager with ID {profile_manager_id} does not exist." - ) - - @staticmethod - def fetch_default_llm_profile(tool: CustomTool) -> ProfileManager: - try: - return ProfileManager.get_default_llm_profile(tool=tool) - except DefaultProfileError: - raise DefaultProfileError("Default ProfileManager does not exist.") - - @staticmethod - def fetch_default_output_response( - tool_studio_prompts: list[ToolStudioPrompt], document_manager_id: str - ) -> dict[str, Any]: - """Method to frame JSON responses for combined output for default for - default profile manager of the project. - - Args: - tool_studio_prompts (list[ToolStudioPrompt]) - document_manager_id (str) - - Returns: - dict[str, Any]: Formatted JSON response for combined output. - """ - # Initialize the result dictionary - result: dict[str, Any] = {} - # Iterate over ToolStudioPrompt records - for tool_prompt in tool_studio_prompts: - if tool_prompt.prompt_type == PSOMKeys.NOTES: - continue - prompt_id = str(tool_prompt.prompt_id) - profile_manager_id = tool_prompt.profile_manager_id - - # If profile_manager is not set, skip this record - if not profile_manager_id: - result[tool_prompt.prompt_key] = "" - continue - - try: - queryset = PromptStudioOutputManager.objects.filter( - prompt_id=prompt_id, - profile_manager=profile_manager_id, - is_single_pass_extract=False, - document_manager_id=document_manager_id, - ) - - if not queryset.exists(): - result[tool_prompt.prompt_key] = "" - continue - - for output in queryset: - result[tool_prompt.prompt_key] = output.output - except ObjectDoesNotExist: - result[tool_prompt.prompt_key] = "" - return result diff --git a/backend/prompt_studio/prompt_studio_output_manager/serializers.py b/backend/prompt_studio/prompt_studio_output_manager/serializers.py deleted file mode 100644 index 237a337ec..000000000 --- a/backend/prompt_studio/prompt_studio_output_manager/serializers.py +++ /dev/null @@ -1,65 +0,0 @@ -import json -import logging - -from django.db.models import Count -from usage.helper import UsageHelper - -from backend.serializers import AuditSerializer - -from .models import PromptStudioOutputManager - -logger = logging.getLogger(__name__) - - -class PromptStudioOutputSerializer(AuditSerializer): - class Meta: - model = PromptStudioOutputManager - fields = "__all__" - - def to_representation(self, instance): - data = super().to_representation(instance) - try: - token_usage = UsageHelper.get_aggregated_token_count(instance.run_id) - except Exception as e: - logger.error( - "Error occured while fetching token usage for run_id" - f"{instance.run_id}: {e}" - ) - token_usage = {} - data["token_usage"] = token_usage - # Get the coverage for the current tool_id and profile_manager_id - try: - # Fetch all relevant outputs for the current tool and profile - related_outputs = ( - PromptStudioOutputManager.objects.filter( - 
tool_id=instance.tool_id, - profile_manager_id=instance.profile_manager_id, - prompt_id=instance.prompt_id, - ) - .values("prompt_id", "profile_manager_id") - .annotate(document_count=Count("document_manager_id")) - ) - - coverage = {} - for output in related_outputs: - prompt_key = str(output["prompt_id"]) - profile_key = str(output["profile_manager_id"]) - coverage[f"coverage_{profile_key}_{prompt_key}"] = output[ - "document_count" - ] - - data["coverage"] = coverage - except Exception as e: - logger.error( - "Error occurred while fetching " - f"coverage for tool_id {instance.tool_id} " - f"and profile_manager_id {instance.profile_manager_id}: {e}" - ) - data["coverage"] = {} - # Convert string to list - try: - data["context"] = json.loads(data["context"]) - except json.JSONDecodeError: - # Convert the old value of data["context"] to a list - data["context"] = [data["context"]] - return data diff --git a/backend/prompt_studio/prompt_studio_output_manager/urls.py b/backend/prompt_studio/prompt_studio_output_manager/urls.py deleted file mode 100644 index 61ec8540f..000000000 --- a/backend/prompt_studio/prompt_studio_output_manager/urls.py +++ /dev/null @@ -1,20 +0,0 @@ -from django.urls import path -from rest_framework.urlpatterns import format_suffix_patterns - -from .views import PromptStudioOutputView - -prompt_doc_list = PromptStudioOutputView.as_view({"get": "list"}) -get_output_for_tool_default = PromptStudioOutputView.as_view( - {"get": "get_output_for_tool_default"} -) - -urlpatterns = format_suffix_patterns( - [ - path("prompt-output/", prompt_doc_list, name="prompt-doc-list"), - path( - "prompt-output/prompt-default-profile/", - get_output_for_tool_default, - name="prompt-default-profile-outputs", - ), - ] -) diff --git a/backend/prompt_studio/prompt_studio_output_manager/views.py b/backend/prompt_studio/prompt_studio_output_manager/views.py deleted file mode 100644 index 3c38cd6ec..000000000 --- a/backend/prompt_studio/prompt_studio_output_manager/views.py +++ /dev/null @@ -1,86 +0,0 @@ -import logging -from typing import Any, Optional - -from django.core.exceptions import ObjectDoesNotExist -from django.db.models import QuerySet -from django.http import HttpRequest -from prompt_studio.prompt_studio.models import ToolStudioPrompt -from prompt_studio.prompt_studio_output_manager.constants import ( - PromptOutputManagerErrorMessage, - PromptStudioOutputManagerKeys, -) -from prompt_studio.prompt_studio_output_manager.output_manager_helper import ( - OutputManagerHelper, -) -from prompt_studio.prompt_studio_output_manager.serializers import ( - PromptStudioOutputSerializer, -) -from rest_framework import status, viewsets -from rest_framework.exceptions import APIException -from rest_framework.response import Response -from rest_framework.versioning import URLPathVersioning -from utils.common_utils import CommonUtils -from utils.filtering import FilterHelper - -from .models import PromptStudioOutputManager - -logger = logging.getLogger(__name__) - - -class PromptStudioOutputView(viewsets.ModelViewSet): - versioning_class = URLPathVersioning - queryset = PromptStudioOutputManager.objects.all() - serializer_class = PromptStudioOutputSerializer - - def get_queryset(self) -> Optional[QuerySet]: - filter_args = FilterHelper.build_filter_args( - self.request, - PromptStudioOutputManagerKeys.TOOL_ID, - PromptStudioOutputManagerKeys.PROMPT_ID, - PromptStudioOutputManagerKeys.PROFILE_MANAGER, - PromptStudioOutputManagerKeys.DOCUMENT_MANAGER, - 
PromptStudioOutputManagerKeys.IS_SINGLE_PASS_EXTRACT, - ) - - # Get the query parameter for "is_single_pass_extract" - is_single_pass_extract_param = self.request.GET.get( - PromptStudioOutputManagerKeys.IS_SINGLE_PASS_EXTRACT, "false" - ) - - # Convert the string representation to a boolean value - is_single_pass_extract = CommonUtils.str_to_bool(is_single_pass_extract_param) - - filter_args[PromptStudioOutputManagerKeys.IS_SINGLE_PASS_EXTRACT] = ( - is_single_pass_extract - ) - - if filter_args: - queryset = PromptStudioOutputManager.objects.filter(**filter_args) - - return queryset - - def get_output_for_tool_default(self, request: HttpRequest) -> Response: - # Get the tool_id from request parameters - # Get the tool_id from request parameters - tool_id = request.GET.get("tool_id") - document_manager_id = request.GET.get("document_manager") - tool_validation_message = PromptOutputManagerErrorMessage.TOOL_VALIDATION - tool_not_found = PromptOutputManagerErrorMessage.TOOL_NOT_FOUND - if not tool_id: - raise APIException(detail=tool_validation_message, code=400) - - try: - # Fetch ToolStudioPrompt records based on tool_id - tool_studio_prompts = ToolStudioPrompt.objects.filter( - tool_id=tool_id - ).order_by("sequence_number") - except ObjectDoesNotExist: - raise APIException(detail=tool_not_found, code=400) - - # Invoke helper method to frame and fetch default response. - result: dict[str, Any] = OutputManagerHelper.fetch_default_output_response( - tool_studio_prompts=tool_studio_prompts, - document_manager_id=document_manager_id, - ) - - return Response(result, status=status.HTTP_200_OK) diff --git a/backend/prompt_studio/prompt_studio_output_manager_v2/output_manager_util.py b/backend/prompt_studio/prompt_studio_output_manager_v2/output_manager_util.py index 4a0352099..b5b6a957d 100644 --- a/backend/prompt_studio/prompt_studio_output_manager_v2/output_manager_util.py +++ b/backend/prompt_studio/prompt_studio_output_manager_v2/output_manager_util.py @@ -1,4 +1,3 @@ -from django.db.models import Count from prompt_studio.prompt_studio_output_manager_v2.models import ( PromptStudioOutputManager, ) @@ -11,41 +10,33 @@ def get_coverage( profile_manager_id: str, prompt_id: str = None, is_single_pass: bool = False, - ) -> dict[str, int]: + ) -> list[str]: """ Method to fetch coverage data for given tool and profile manager. Args: - tool (CustomTool): The tool instance or ID for which coverage is fetched. + tool_id (str): The ID of the tool for which coverage is fetched. profile_manager_id (str): The ID of the profile manager for which coverage is calculated. prompt_id (Optional[str]): The ID of the prompt (optional). - is_single_pass (Optional[bool]): Singlepass enabled or not + is_single_pass (Optional[bool]): Singlepass enabled or not. If provided, coverage is fetched for the specific prompt. Returns: - dict[str, int]: A dictionary containing coverage information. - Keys are formatted as "coverage__". - Values are the count of documents associated with each prompt - and profile combination. + list[str]: A list of document manager IDs (as strings) that have a stored output for the given prompt and profile combination.
""" # TODO: remove singlepass reference - prompt_outputs = ( - PromptStudioOutputManager.objects.filter( - tool_id=tool_id, - profile_manager_id=profile_manager_id, - prompt_id=prompt_id, - is_single_pass_extract=is_single_pass, - ) - .values("prompt_id", "profile_manager_id") - .annotate(document_count=Count("document_manager_id")) - ) + prompt_outputs = PromptStudioOutputManager.objects.filter( + tool_id=tool_id, + profile_manager_id=profile_manager_id, + prompt_id=prompt_id, + is_single_pass_extract=is_single_pass, + ).values("prompt_id", "profile_manager_id", "document_manager_id") - coverage = {} + coverage = [] for prompt_output in prompt_outputs: - prompt_key = str(prompt_output["prompt_id"]) - profile_key = str(prompt_output["profile_manager_id"]) - coverage[f"coverage_{prompt_key}_{profile_key}"] = prompt_output[ - "document_count" - ] + coverage.append(str(prompt_output["document_manager_id"])) return coverage diff --git a/backend/prompt_studio/prompt_studio_registry/__init__.py b/backend/prompt_studio/prompt_studio_registry/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/backend/prompt_studio/prompt_studio_registry/admin.py b/backend/prompt_studio/prompt_studio_registry/admin.py deleted file mode 100644 index 9f6bdeb83..000000000 --- a/backend/prompt_studio/prompt_studio_registry/admin.py +++ /dev/null @@ -1,5 +0,0 @@ -from django.contrib import admin - -from .models import PromptStudioRegistry - -admin.site.register(PromptStudioRegistry) diff --git a/backend/prompt_studio/prompt_studio_registry/apps.py b/backend/prompt_studio/prompt_studio_registry/apps.py deleted file mode 100644 index 5bd8c61d0..000000000 --- a/backend/prompt_studio/prompt_studio_registry/apps.py +++ /dev/null @@ -1,5 +0,0 @@ -from django.apps import AppConfig - - -class PromptStudioRegistry(AppConfig): - name = "prompt_studio.prompt_studio_registry" diff --git a/backend/prompt_studio/prompt_studio_registry/constants.py b/backend/prompt_studio/prompt_studio_registry/constants.py deleted file mode 100644 index 966f30826..000000000 --- a/backend/prompt_studio/prompt_studio_registry/constants.py +++ /dev/null @@ -1,109 +0,0 @@ -class PromptStudioRegistryKeys: - CREATED_BY = "created_by" - TOOL_ID = "tool_id" - NUMBER = "Number" - FLOAT = "Float" - PG_VECTOR = "Postgres pg_vector" - ANSWERS = "answers" - UNIQUE_FILE_ID = "unique_file_id" - PROMPT_REGISTRY_ID = "prompt_registry_id" - FILE_NAME = "file_name" - UNDEFINED = "undefined" - TABLE = "table" - RECORD = "record" - - -class PromptStudioRegistryErrors: - SERIALIZATION_FAILED = "Data Serialization Failed." - DUPLICATE_API = "It appears that a duplicate call may have been made." 
- CUSTOM_TOOL_EXISTS = "Custom tool with similiar configuration already exists" - - -class LogLevels: - INFO = "INFO" - ERROR = "ERROR" - DEBUG = "DEBUG" - RUN = "RUN" - - -# TODO: Update prompt studio constants to have a single source of truth -class JsonSchemaKey: - TYPE = "type" - TITLE = "title" - DEFAULT = "default" - ENUM = "enum" - DESCRIPTION = "description" - REQUIRED = "required" - STRING = "string" - PROCESSOR_TO_USE = "Processor to use" - AZURE_OPEN_AI = "Azure OpenAI" - PROPERTIES = "properties" - DISPLAY_NAME = "display_name" - FUNCTION_NAME = "function_name" - PARAMETERS = "parameters" - VERSIONS = "versions" - OUTPUT_TYPE = "output_type" - INPUT_TYPE = "input_type" - IS_CACHABLE = "is_cacheable" - REQUIRES = "requires" - DEFAULT_DESCRIPTION_PROCESSOR = "Use Unstract processor \ - if you do not want to use a cloud provider for privacy reasons" - NAME = "name" - ACTIVE = "active" - PROMPT = "prompt" - CHUNK_SIZE = "chunk-size" - PROMPTX = "promptx" - VECTOR_DB = "vector-db" - EMBEDDING = "embedding" - X2TEXT_ADAPTER = "x2text_adapter" - CHUNK_OVERLAP = "chunk-overlap" - LLM = "llm" - RETRIEVAL_STRATEGY = "retrieval-strategy" - SIMPLE = "simple" - TYPE = "type" - NUMBER = "number" - EMAIL = "email" - DATE = "date" - BOOLEAN = "boolean" - JSON = "json" - PREAMBLE = "preamble" - SIMILARITY_TOP_K = "similarity-top-k" - PROMPT_TOKENS = "prompt_tokens" - COMPLETION_TOKENS = "completion_tokens" - TOTAL_TOKENS = "total_tokens" - RESPONSE = "response" - POSTAMBLE = "postamble" - GRAMMAR = "grammar" - WORD = "word" - SYNONYMS = "synonyms" - OUTPUTS = "outputs" - SECTION = "section" - DEFAULT = "default" - AUTHOR = "author" - ICON = "icon" - REINDEX = "reindex" - TOOL_ID = "tool_id" - EMBEDDING_SUFFIX = "embedding_suffix" - FUNCTION_NAME = "function_name" - PROMPT_REGISTRY_ID = "prompt_registry_id" - NOTES = "NOTES" - TOOL_SETTINGS = "tool_settings" - ENABLE_CHALLENGE = "enable_challenge" - CHALLENGE_LLM = "challenge_llm" - ENABLE_SINGLE_PASS_EXTRACTION = "enable_single_pass_extraction" - SUMMARIZE_PROMPT = "summarize_prompt" - SUMMARIZE_AS_SOURCE = "summarize_as_source" - ENABLE_HIGHLIGHT = "enable_highlight" - PLATFORM_POSTAMBLE = "platform_postamble" - - -class SpecKey: - PROCESSOR = "processor" - SPEC = "spec" - OUTPUT_FOLDER = "outputFolder" - CREATE_OUTPUT_DOCUMENT = "createOutputDocument" - USE_CACHE = "useCache" - EMBEDDING_TRANSFORMER = "embeddingTransformer" - VECTOR_STORE = "vectorstore" - OUTPUT_TYPE = "outputType" - OUTPUT_PROCESSING = "outputProcessing" diff --git a/backend/prompt_studio/prompt_studio_registry/exceptions.py b/backend/prompt_studio/prompt_studio_registry/exceptions.py deleted file mode 100644 index cb0e3c71b..000000000 --- a/backend/prompt_studio/prompt_studio_registry/exceptions.py +++ /dev/null @@ -1,33 +0,0 @@ -from rest_framework.exceptions import APIException - - -class InternalError(APIException): - status_code = 500 - default_detail = "Internal service error." - - -class ToolDoesNotExist(APIException): - status_code = 500 - default_detail = "Tool does not exist." - - -class ToolSaveError(APIException): - status_code = 500 - default_detail = "Error while saving the tool." - - -class EmptyToolExportError(APIException): - status_code = 500 - default_detail = ( - "Prompt Studio project without prompts cannot be exported. " - "Please ensure there is at least one active prompt " - "that has been run before exporting." 
- ) - - -class InValidCustomToolError(APIException): - status_code = 500 - default_detail = ( - "This prompt studio project cannot be exported. It probably " - "has some empty or unexecuted prompts." - ) diff --git a/backend/prompt_studio/prompt_studio_registry/fields.py b/backend/prompt_studio/prompt_studio_registry/fields.py deleted file mode 100644 index 6e790fb3c..000000000 --- a/backend/prompt_studio/prompt_studio_registry/fields.py +++ /dev/null @@ -1,29 +0,0 @@ -import logging - -from django.db import models - -logger = logging.getLogger(__name__) - - -class ToolPropertyJSONField(models.JSONField): - def from_db_value(self, value, expression, connection): # type: ignore - metadata = super().from_db_value(value, expression, connection) - return metadata - - -class ToolSpecJSONField(models.JSONField): - def from_db_value(self, value, expression, connection): # type: ignore - metadata = super().from_db_value(value, expression, connection) - return metadata - - -class ToolVariablesJSONField(models.JSONField): - def from_db_value(self, value, expression, connection): # type: ignore - metadata = super().from_db_value(value, expression, connection) - return metadata - - -class ToolMetadataJSONField(models.JSONField): - def from_db_value(self, value, expression, connection): # type: ignore - metadata = super().from_db_value(value, expression, connection) - return metadata diff --git a/backend/prompt_studio/prompt_studio_registry/migrations/0001_initial.py b/backend/prompt_studio/prompt_studio_registry/migrations/0001_initial.py deleted file mode 100644 index cf140e76c..000000000 --- a/backend/prompt_studio/prompt_studio_registry/migrations/0001_initial.py +++ /dev/null @@ -1,87 +0,0 @@ -# Generated by Django 4.2.1 on 2024-01-20 08:04 - -import uuid - -import django.db.models.deletion -import prompt_studio.prompt_studio_registry.fields -from django.conf import settings -from django.db import migrations, models - - -class Migration(migrations.Migration): - initial = True - - dependencies = [ - migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ] - - operations = [ - migrations.CreateModel( - name="PromptStudioRegistry", - fields=[ - ("created_at", models.DateTimeField(auto_now_add=True)), - ("updated_at", models.DateTimeField(auto_now=True)), - ( - "prompt_registry_id", - models.UUIDField( - default=uuid.uuid4, - editable=False, - primary_key=True, - serialize=False, - ), - ), - ( - "tool_property", - prompt_studio.prompt_studio_registry.fields.ToolPropertyJSONField( - db_column="tool_property", default=dict - ), - ), - ( - "tool_spec", - prompt_studio.prompt_studio_registry.fields.ToolSpecJSONField( - db_column="tool_spec", default=dict - ), - ), - ( - "tool_metadata", - prompt_studio.prompt_studio_registry.fields.ToolMetadataJSONField( - db_column="tool_metadata", default=dict - ), - ), - ( - "icon", - models.CharField( - db_comment="Tool icon in svg format", editable=False - ), - ), - ("url", models.CharField(editable=False)), - ("name", models.CharField(default="", editable=False)), - ("description", models.CharField(default="", editable=False)), - ( - "created_by", - models.ForeignKey( - blank=True, - editable=False, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - related_name="prompt_registry_created_by", - to=settings.AUTH_USER_MODEL, - ), - ), - ( - "modified_by", - models.ForeignKey( - blank=True, - editable=False, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - related_name="prompt_registry_modified_by", - to=settings.AUTH_USER_MODEL, - ), - 
), - ], - options={ - "abstract": False, - }, - ), - ] diff --git a/backend/prompt_studio/prompt_studio_registry/migrations/0002_remove_promptstudioregistry_updated_at_and_more.py b/backend/prompt_studio/prompt_studio_registry/migrations/0002_remove_promptstudioregistry_updated_at_and_more.py deleted file mode 100644 index 1410b7e14..000000000 --- a/backend/prompt_studio/prompt_studio_registry/migrations/0002_remove_promptstudioregistry_updated_at_and_more.py +++ /dev/null @@ -1,21 +0,0 @@ -# Generated by Django 4.2.1 on 2024-01-23 19:02 - -from django.db import migrations, models - - -class Migration(migrations.Migration): - dependencies = [ - ("prompt_studio_registry", "0001_initial"), - ] - - operations = [ - migrations.RemoveField( - model_name="promptstudioregistry", - name="updated_at", - ), - migrations.AddField( - model_name="promptstudioregistry", - name="modified_at", - field=models.DateTimeField(auto_now=True), - ), - ] diff --git a/backend/prompt_studio/prompt_studio_registry/migrations/0003_alter_promptstudioregistry_tool_metadata_and_more.py b/backend/prompt_studio/prompt_studio_registry/migrations/0003_alter_promptstudioregistry_tool_metadata_and_more.py deleted file mode 100644 index 81b2d00f8..000000000 --- a/backend/prompt_studio/prompt_studio_registry/migrations/0003_alter_promptstudioregistry_tool_metadata_and_more.py +++ /dev/null @@ -1,43 +0,0 @@ -# Generated by Django 4.2.1 on 2024-02-01 11:58 - -import prompt_studio.prompt_studio_registry.fields -from django.db import migrations - - -class Migration(migrations.Migration): - dependencies = [ - ( - "prompt_studio_registry", - "0002_remove_promptstudioregistry_updated_at_and_more", - ), - ] - - operations = [ - migrations.AlterField( - model_name="promptstudioregistry", - name="tool_metadata", - field=prompt_studio.prompt_studio_registry.fields.ToolMetadataJSONField( - db_column="tool_metadata", - db_comment="Metadata from Prompt Studio", - default=dict, - ), - ), - migrations.AlterField( - model_name="promptstudioregistry", - name="tool_property", - field=prompt_studio.prompt_studio_registry.fields.ToolPropertyJSONField( - db_column="tool_property", - db_comment="PROPERTIES of the tool", - default=dict, - ), - ), - migrations.AlterField( - model_name="promptstudioregistry", - name="tool_spec", - field=prompt_studio.prompt_studio_registry.fields.ToolSpecJSONField( - db_column="tool_spec", - db_comment="SPEC of the tool", - default=dict, - ), - ), - ] diff --git a/backend/prompt_studio/prompt_studio_registry/migrations/0004_promptstudioregistry_custom_tool.py b/backend/prompt_studio/prompt_studio_registry/migrations/0004_promptstudioregistry_custom_tool.py deleted file mode 100644 index 1bdee594e..000000000 --- a/backend/prompt_studio/prompt_studio_registry/migrations/0004_promptstudioregistry_custom_tool.py +++ /dev/null @@ -1,28 +0,0 @@ -# Generated by Django 4.2.1 on 2024-02-06 03:55 - -import django.db.models.deletion -from django.db import migrations, models - - -class Migration(migrations.Migration): - dependencies = [ - ("prompt_studio_core", "0003_merge_20240125_1501"), - ( - "prompt_studio_registry", - "0003_alter_promptstudioregistry_tool_metadata_and_more", - ), - ] - - operations = [ - migrations.AddField( - model_name="promptstudioregistry", - name="custom_tool", - field=models.OneToOneField( - editable=False, - null=True, - on_delete=django.db.models.deletion.CASCADE, - related_name="prompt_studio_registry", - to="prompt_studio_core.customtool", - ), - ) - ] diff --git 
a/backend/prompt_studio/prompt_studio_registry/migrations/0005_delete_corrupt_tool_instance.py b/backend/prompt_studio/prompt_studio_registry/migrations/0005_delete_corrupt_tool_instance.py deleted file mode 100644 index d8d47e3f6..000000000 --- a/backend/prompt_studio/prompt_studio_registry/migrations/0005_delete_corrupt_tool_instance.py +++ /dev/null @@ -1,30 +0,0 @@ -# Generated by Django 4.2.1 on 2024-02-06 19:36 -from django.db import migrations, transaction -from tool_instance.exceptions import ToolDoesNotExist -from tool_instance.models import ToolInstance -from tool_instance.tool_processor import ToolProcessor - - -def populate_tool_instances(apps, schema_editor): - ToolInstance = apps.get_model("tool_instance", "ToolInstance") - - # Get all tool instances - tool_instances = ToolInstance.objects.all() - with transaction.atomic(): - # Loop through each tool instance and call the function - for tool_instance in tool_instances: - try: - # Call the function to lookup the tool by tool_id - tool = ToolProcessor.get_tool_by_uid(tool_instance.tool_id) - except ToolDoesNotExist as e: - # Delete the tool_instance since its a stray exported tool - tool_instance.delete() - - -class Migration(migrations.Migration): - dependencies = [ - ("prompt_studio_registry", "0004_promptstudioregistry_custom_tool"), - ("tool_instance", "0001_initial"), - ] - - operations = [migrations.RunPython(populate_tool_instances)] diff --git a/backend/prompt_studio/prompt_studio_registry/migrations/0006_promptstudioregistry_shared_to_org_and_more.py b/backend/prompt_studio/prompt_studio_registry/migrations/0006_promptstudioregistry_shared_to_org_and_more.py deleted file mode 100644 index df8b7e6be..000000000 --- a/backend/prompt_studio/prompt_studio_registry/migrations/0006_promptstudioregistry_shared_to_org_and_more.py +++ /dev/null @@ -1,30 +0,0 @@ -# Generated by Django 4.2.1 on 2024-03-21 11:36 - -from django.conf import settings -from django.db import migrations, models - - -class Migration(migrations.Migration): - dependencies = [ - migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ("prompt_studio_registry", "0005_delete_corrupt_tool_instance"), - ] - - operations = [ - migrations.AddField( - model_name="promptstudioregistry", - name="shared_to_org", - field=models.BooleanField( - db_comment="Is the exported tool shared with entire org", - default=False, - ), - ), - migrations.AddField( - model_name="promptstudioregistry", - name="shared_users", - field=models.ManyToManyField( - related_name="shared_exported_tools", - to=settings.AUTH_USER_MODEL, - ), - ), - ] diff --git a/backend/prompt_studio/prompt_studio_registry/migrations/__init__.py b/backend/prompt_studio/prompt_studio_registry/migrations/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/backend/prompt_studio/prompt_studio_registry/models.py b/backend/prompt_studio/prompt_studio_registry/models.py deleted file mode 100644 index 2ce06af3e..000000000 --- a/backend/prompt_studio/prompt_studio_registry/models.py +++ /dev/null @@ -1,91 +0,0 @@ -import uuid -from typing import Any - -from account.models import User -from django.db import models -from django.db.models import QuerySet -from prompt_studio.prompt_studio.models import CustomTool -from utils.models.base_model import BaseModel - -from .fields import ToolMetadataJSONField, ToolPropertyJSONField, ToolSpecJSONField - - -class PromptStudioRegistryModelManager(models.Manager): - def get_queryset(self) -> QuerySet[Any]: - return super().get_queryset() - - def 
list_tools(self, user: User) -> QuerySet[Any]: - return ( - self.get_queryset() - .filter(models.Q(shared_users=user) | models.Q(shared_to_org=True)) - .distinct("prompt_registry_id") - ) - - -class PromptStudioRegistry(BaseModel): - """Data model to export JSON fields needed for registering the Custom tool - to the tool registry. - - By default the tools will be added to private tool hub. - """ - - prompt_registry_id = models.UUIDField( - primary_key=True, default=uuid.uuid4, editable=False - ) - name = models.CharField(editable=False, default="") - description = models.CharField(editable=False, default="") - tool_property = ToolPropertyJSONField( - db_column="tool_property", - db_comment="PROPERTIES of the tool", - null=False, - blank=False, - default=dict, - ) - tool_spec = ToolSpecJSONField( - db_column="tool_spec", - db_comment="SPEC of the tool", - null=False, - blank=False, - default=dict, - ) - tool_metadata = ToolMetadataJSONField( - db_column="tool_metadata", - db_comment="Metadata from Prompt Studio", - null=False, - blank=False, - default=dict, - ) - icon = models.CharField(db_comment="Tool icon in svg format", editable=False) - url = models.CharField(editable=False) - custom_tool = models.OneToOneField( - CustomTool, - on_delete=models.CASCADE, - related_name="prompt_studio_registry", - editable=False, - null=True, - ) - created_by = models.ForeignKey( - User, - on_delete=models.SET_NULL, - related_name="prompt_registry_created_by", - null=True, - blank=True, - editable=False, - ) - modified_by = models.ForeignKey( - User, - on_delete=models.SET_NULL, - related_name="prompt_registry_modified_by", - null=True, - blank=True, - editable=False, - ) - shared_to_org = models.BooleanField( - default=False, - db_comment="Is the exported tool shared with entire org", - ) - # Introduced field to establish M2M relation between users and tools. - # This will introduce intermediary table which relates both the models. 
- shared_users = models.ManyToManyField(User, related_name="shared_exported_tools") - - objects = PromptStudioRegistryModelManager() diff --git a/backend/prompt_studio/prompt_studio_registry/prompt_studio_registry_helper.py b/backend/prompt_studio/prompt_studio_registry/prompt_studio_registry_helper.py deleted file mode 100644 index effb4abc5..000000000 --- a/backend/prompt_studio/prompt_studio_registry/prompt_studio_registry_helper.py +++ /dev/null @@ -1,410 +0,0 @@ -import logging -from typing import Any, Optional - -from account.models import User -from adapter_processor.models import AdapterInstance -from django.conf import settings -from django.db import IntegrityError -from prompt_studio.modifier_loader import ModifierConfig -from prompt_studio.modifier_loader import load_plugins as load_modifier_plugins -from prompt_studio.prompt_profile_manager.models import ProfileManager -from prompt_studio.prompt_studio.models import ToolStudioPrompt -from prompt_studio.prompt_studio_core.models import CustomTool -from prompt_studio.prompt_studio_core.prompt_studio_helper import PromptStudioHelper -from prompt_studio.prompt_studio_output_manager.models import PromptStudioOutputManager -from unstract.tool_registry.dto import Properties, Spec, Tool - -from .constants import JsonSchemaKey, PromptStudioRegistryKeys -from .exceptions import ( - EmptyToolExportError, - InternalError, - InValidCustomToolError, - ToolSaveError, -) -from .models import PromptStudioRegistry -from .serializers import PromptStudioRegistrySerializer - -logger = logging.getLogger(__name__) -modifier_plugins = load_modifier_plugins() - - -class PromptStudioRegistryHelper: - """Class to register custom tools to tool studio registry. - - By default the exported tools will be private and will be executed - with the help of a proto tool. - """ - - @staticmethod - def frame_spec(tool: CustomTool) -> Spec: - """Method to return spec of the Custom tool. - - Args: - tool (CustomTool): Saved tool data - - Returns: - dict: spec dict - """ - properties = { - "challenge_llm": { - "type": "string", - "title": "Challenger LLM", - "adapterType": "LLM", - "description": "LLM to use for LLMChallenge", - "adapterIdKey": "challenge_llm_adapter_id", - }, - "enable_challenge": { - "type": "boolean", - "title": "Enable LLMChallenge", - "default": False, - "description": "Enables LLMChallenge", - }, - "summarize_as_source": { - "type": "boolean", - "title": "Enable SummarizedExtraction", - "default": False, - "description": "Enables SummarizedExtraction", - }, - "single_pass_extraction_mode": { - "type": "boolean", - "title": "Enable SinglePass Extraction", - "default": False, - "description": "Enables SinglePass Extraction", - }, - "enable_highlight": { - "type": "boolean", - "title": "Enable highlight", - "default": False, - "description": "Enables highlight", - }, - } - - spec = Spec( - title=str(tool.tool_id), - description=tool.description, - required=[JsonSchemaKey.CHALLENGE_LLM], - properties=properties, - ) - return spec - - @staticmethod - def frame_properties(tool: CustomTool) -> Properties: - """Method to return properties of the tool. - - Args: - tool (CustomTool): Saved custom tool data. 
- - Returns: - dict: Properties dict - """ - # TODO: Update for new architecture - tool_props = Properties( - display_name=tool.tool_name, - function_name=str(tool.tool_id), - description=tool.description, - ) - return tool_props - - @staticmethod - def get_tool_by_prompt_registry_id( - prompt_registry_id: str, - ) -> Optional[Tool]: - """Gets the `Tool` associated with a prompt registry ID if it exists. - - Args: - prompt_registry_id (str): Prompt registry ID to fetch for - - Returns: - Optional[Tool]: The `Tool` exported from Prompt Studio - """ - try: - prompt_registry_tool = PromptStudioRegistry.objects.get( - pk=prompt_registry_id - ) - # Suppress all exceptions to allow processing - except Exception as e: - logger.warning( - "Error while fetching for prompt registry " - f"ID {prompt_registry_id}: {e} " - ) - return None - return Tool( - tool_uid=prompt_registry_tool.prompt_registry_id, - properties=Properties.from_dict(prompt_registry_tool.tool_property), - spec=Spec.from_dict(prompt_registry_tool.tool_spec), - icon=prompt_registry_tool.icon, - image_url=settings.STRUCTURE_TOOL_IMAGE_URL, - image_name=settings.STRUCTURE_TOOL_IMAGE_NAME, - image_tag=settings.STRUCTURE_TOOL_IMAGE_TAG, - ) - - @staticmethod - def update_or_create_psr_tool( - custom_tool: CustomTool, - shared_with_org: bool, - user_ids: set[int], - force_export: bool, - ) -> PromptStudioRegistry: - """Updates or creates the PromptStudioRegistry record. - - This appears as a separate tool in the workflow and is mapped - 1:1 with the `CustomTool`. - - Args: - custom_tool (CustomTool): The instance of the custom tool to be updated - or created. - shared_with_org (bool): Flag indicating whether the tool is shared with - the organization. - user_ids (set[int]): A set of user IDs to whom the tool is shared. - force_export (bool): Indicates if the export is being forced. 
- - - Raises: - ToolSaveError - InternalError - - Returns: - obj: PromptStudioRegistry instance that was updated or created - """ - try: - properties: Properties = PromptStudioRegistryHelper.frame_properties( - tool=custom_tool - ) - spec: Spec = PromptStudioRegistryHelper.frame_spec(tool=custom_tool) - prompts: list[ToolStudioPrompt] = PromptStudioHelper.fetch_prompt_from_tool( - tool_id=custom_tool.tool_id - ) - metadata = PromptStudioRegistryHelper.frame_export_json( - tool=custom_tool, prompts=prompts, force_export=force_export - ) - - obj: PromptStudioRegistry - created: bool - obj, created = PromptStudioRegistry.objects.update_or_create( - custom_tool=custom_tool, - created_by=custom_tool.created_by, - defaults={ - "name": custom_tool.tool_name, - "tool_property": properties.to_dict(), - "tool_spec": spec.to_dict(), - "tool_metadata": metadata, - "icon": custom_tool.icon, - "description": custom_tool.description, - }, - ) - if created: - logger.info(f"PSR {obj.prompt_registry_id} was created") - else: - logger.info(f"PSR {obj.prompt_registry_id} was updated") - obj.modified_by = custom_tool.modified_by - obj.shared_to_org = shared_with_org - if not shared_with_org: - obj.shared_users.clear() - obj.shared_users.add(*user_ids) - # add prompt studio users - # for shared_user in custom_tool.shared_users: - obj.shared_users.add( - *custom_tool.shared_users.all().values_list("id", flat=True) - ) - # add prompt studio owner - obj.shared_users.add(custom_tool.created_by) - else: - obj.shared_users.clear() - obj.save() - return obj - except IntegrityError as error: - logger.error( - "Integrity Error - Error occurred while " - f"exporting custom tool : {error}" - ) - raise ToolSaveError - - @staticmethod - def frame_export_json( - tool: CustomTool, - prompts: list[ToolStudioPrompt], - force_export: bool, - ) -> dict[str, Any]: - export_metadata = {} - - prompt_grammer = tool.prompt_grammer - grammar_list = [] - grammer_dict = {} - outputs: list[dict[str, Any]] = [] - output: dict[str, Any] = {} - invalidated_prompts: list[str] = [] - invalidated_outputs: list[str] = [] - - if not prompts: - raise EmptyToolExportError() - - if prompt_grammer: - for word, synonyms in prompt_grammer.items(): - synonyms = prompt_grammer[word] - grammer_dict[JsonSchemaKey.WORD] = word - grammer_dict[JsonSchemaKey.SYNONYMS] = synonyms - grammar_list.append(grammer_dict) - grammer_dict = {} - - export_metadata[JsonSchemaKey.NAME] = tool.tool_name - export_metadata[JsonSchemaKey.DESCRIPTION] = tool.description - export_metadata[JsonSchemaKey.AUTHOR] = tool.author - export_metadata[JsonSchemaKey.TOOL_ID] = str(tool.tool_id) - - default_llm_profile = ProfileManager.get_default_llm_profile(tool) - challenge_llm_instance: Optional[AdapterInstance] = tool.challenge_llm - challenge_llm: Optional[str] = None - # Using default profile manager llm if challenge_llm is None - if challenge_llm_instance: - challenge_llm = str(challenge_llm_instance.id) - else: - challenge_llm = str(default_llm_profile.llm.id) - - embedding_suffix = "" - adapter_id = "" - vector_db = str(default_llm_profile.vector_store.id) - embedding_model = str(default_llm_profile.embedding_model.id) - llm = str(default_llm_profile.llm.id) - x2text = str(default_llm_profile.x2text.id) - - # Tool settings - tool_settings = {} - tool_settings[JsonSchemaKey.SUMMARIZE_PROMPT] = tool.summarize_prompt - tool_settings[JsonSchemaKey.SUMMARIZE_AS_SOURCE] = tool.summarize_as_source - tool_settings[JsonSchemaKey.PREAMBLE] = tool.preamble - 
tool_settings[JsonSchemaKey.POSTAMBLE] = tool.postamble - tool_settings[JsonSchemaKey.GRAMMAR] = grammar_list - tool_settings[JsonSchemaKey.LLM] = llm - tool_settings[JsonSchemaKey.X2TEXT_ADAPTER] = x2text - tool_settings[JsonSchemaKey.VECTOR_DB] = vector_db - tool_settings[JsonSchemaKey.EMBEDDING] = embedding_model - tool_settings[JsonSchemaKey.CHUNK_SIZE] = default_llm_profile.chunk_size - tool_settings[JsonSchemaKey.CHUNK_OVERLAP] = default_llm_profile.chunk_overlap - tool_settings[JsonSchemaKey.ENABLE_CHALLENGE] = tool.enable_challenge - tool_settings[JsonSchemaKey.CHALLENGE_LLM] = challenge_llm - tool_settings[JsonSchemaKey.ENABLE_SINGLE_PASS_EXTRACTION] = ( - tool.single_pass_extraction_mode - ) - tool_settings[JsonSchemaKey.ENABLE_HIGHLIGHT] = tool.enable_highlight - tool_settings[JsonSchemaKey.PLATFORM_POSTAMBLE] = getattr( - settings, JsonSchemaKey.PLATFORM_POSTAMBLE.upper(), "" - ) - - for prompt in prompts: - if prompt.prompt_type == JsonSchemaKey.NOTES or not prompt.active: - continue - - if not prompt.prompt: - invalidated_prompts.append(prompt.prompt_key) - continue - - if not prompt.profile_manager: - prompt.profile_manager = default_llm_profile - - if not force_export: - prompt_output = PromptStudioOutputManager.objects.filter( - tool_id=tool.tool_id, - prompt_id=prompt.prompt_id, - profile_manager=prompt.profile_manager, - ).all() - if not prompt_output: - invalidated_outputs.append(prompt.prompt_key) - continue - - vector_db = str(prompt.profile_manager.vector_store.id) - embedding_model = str(prompt.profile_manager.embedding_model.id) - llm = str(prompt.profile_manager.llm.id) - x2text = str(prompt.profile_manager.x2text.id) - adapter_id = str(prompt.profile_manager.embedding_model.adapter_id) - embedding_suffix = adapter_id.split("|")[0] - - output[JsonSchemaKey.PROMPT] = prompt.prompt - output[JsonSchemaKey.ACTIVE] = prompt.active - output[JsonSchemaKey.CHUNK_SIZE] = prompt.profile_manager.chunk_size - output[JsonSchemaKey.VECTOR_DB] = vector_db - output[JsonSchemaKey.EMBEDDING] = embedding_model - output[JsonSchemaKey.X2TEXT_ADAPTER] = x2text - output[JsonSchemaKey.CHUNK_OVERLAP] = prompt.profile_manager.chunk_overlap - output[JsonSchemaKey.LLM] = llm - output[JsonSchemaKey.PREAMBLE] = tool.preamble - output[JsonSchemaKey.POSTAMBLE] = tool.postamble - output[JsonSchemaKey.GRAMMAR] = grammar_list - output[JsonSchemaKey.TYPE] = prompt.enforce_type - output[JsonSchemaKey.NAME] = prompt.prompt_key - output[JsonSchemaKey.RETRIEVAL_STRATEGY] = ( - prompt.profile_manager.retrieval_strategy - ) - output[JsonSchemaKey.SIMILARITY_TOP_K] = ( - prompt.profile_manager.similarity_top_k - ) - output[JsonSchemaKey.SECTION] = prompt.profile_manager.section - output[JsonSchemaKey.REINDEX] = prompt.profile_manager.reindex - output[JsonSchemaKey.EMBEDDING_SUFFIX] = embedding_suffix - - if ( - prompt.enforce_type == PromptStudioRegistryKeys.TABLE - or prompt.enforce_type == PromptStudioRegistryKeys.RECORD - ): - for modifier_plugin in modifier_plugins: - cls = modifier_plugin[ModifierConfig.METADATA][ - ModifierConfig.METADATA_SERVICE_CLASS - ] - output = cls.update( - output=output, - tool_id=tool.tool_id, - prompt_id=prompt.prompt_id, - prompt=prompt.prompt, - ) - - outputs.append(output) - output = {} - vector_db = "" - embedding_suffix = "" - adapter_id = "" - llm = "" - embedding_model = "" - - if not outputs: - raise EmptyToolExportError() - - if invalidated_prompts: - raise InValidCustomToolError( - f"Cannot export tool. Prompt(s): {', '.join(invalidated_prompts)} " - "are empty. 
Please enter a valid prompt." - ) - if not force_export and invalidated_outputs: - raise InValidCustomToolError( - detail="Cannot export tool. Prompt(s):" - f" {', '.join(invalidated_outputs)}" - " were not run. Please run them before exporting.", - code="warning", - ) - export_metadata[JsonSchemaKey.TOOL_SETTINGS] = tool_settings - export_metadata[JsonSchemaKey.OUTPUTS] = outputs - return export_metadata - - @staticmethod - def fetch_json_for_registry(user: User) -> list[dict[str, Any]]: - try: - # filter the Prompt studio registry based on the users and org flag - prompt_studio_tools = PromptStudioRegistry.objects.list_tools(user) - pi_serializer = PromptStudioRegistrySerializer( - instance=prompt_studio_tools, many=True - ) - except Exception as error: - logger.error(f"Error occured while fetching tool for tool_id: {error}") - raise InternalError() - tool_metadata: dict[str, Any] = {} - tool_list = [] - for prompts in pi_serializer.data: - tool_metadata[JsonSchemaKey.NAME] = prompts.get(JsonSchemaKey.NAME) - tool_metadata[JsonSchemaKey.DESCRIPTION] = prompts.get( - JsonSchemaKey.DESCRIPTION - ) - tool_metadata[JsonSchemaKey.ICON] = prompts.get(JsonSchemaKey.ICON) - tool_metadata[JsonSchemaKey.FUNCTION_NAME] = prompts.get( - JsonSchemaKey.PROMPT_REGISTRY_ID - ) - tool_list.append(tool_metadata) - tool_metadata = {} - return tool_list diff --git a/backend/prompt_studio/prompt_studio_registry/serializers.py b/backend/prompt_studio/prompt_studio_registry/serializers.py deleted file mode 100644 index 605c3f368..000000000 --- a/backend/prompt_studio/prompt_studio_registry/serializers.py +++ /dev/null @@ -1,39 +0,0 @@ -from typing import Any - -from account.serializer import UserSerializer -from rest_framework import serializers - -from backend.serializers import AuditSerializer - -from .models import PromptStudioRegistry - - -class PromptStudioRegistrySerializer(AuditSerializer): - class Meta: - model = PromptStudioRegistry - fields = "__all__" - - -class PromptStudioRegistryInfoSerializer(AuditSerializer): - shared_users = UserSerializer(many=True) - prompt_studio_users = serializers.SerializerMethodField() - - class Meta: - model = PromptStudioRegistry - fields = ( - "name", - "shared_users", - "shared_to_org", - "prompt_studio_users", - ) - - def get_prompt_studio_users(self, obj: PromptStudioRegistry) -> Any: - - prompt_studio_users = obj.custom_tool.shared_users - return UserSerializer(prompt_studio_users, many=True).data - - -class ExportToolRequestSerializer(serializers.Serializer): - is_shared_with_org = serializers.BooleanField(default=False) - user_id = serializers.ListField(child=serializers.IntegerField(), required=False) - force_export = serializers.BooleanField(default=False) diff --git a/backend/prompt_studio/prompt_studio_registry/urls.py b/backend/prompt_studio/prompt_studio_registry/urls.py deleted file mode 100644 index 9cc93e05e..000000000 --- a/backend/prompt_studio/prompt_studio_registry/urls.py +++ /dev/null @@ -1,3 +0,0 @@ -from rest_framework.urlpatterns import format_suffix_patterns - -urlpatterns = format_suffix_patterns([]) diff --git a/backend/prompt_studio/prompt_studio_registry/views.py b/backend/prompt_studio/prompt_studio_registry/views.py deleted file mode 100644 index d050088a2..000000000 --- a/backend/prompt_studio/prompt_studio_registry/views.py +++ /dev/null @@ -1,35 +0,0 @@ -import logging -from typing import Optional - -from django.db.models import QuerySet -from prompt_studio.prompt_studio_registry.constants import PromptStudioRegistryKeys -from 
prompt_studio.prompt_studio_registry.serializers import ( - PromptStudioRegistrySerializer, -) -from rest_framework import viewsets -from rest_framework.versioning import URLPathVersioning -from utils.filtering import FilterHelper - -from .models import PromptStudioRegistry - -logger = logging.getLogger(__name__) - - -class PromptStudioRegistryView(viewsets.ModelViewSet): - """Driver class to handle export and registering of custom tools to private - tool hub.""" - - versioning_class = URLPathVersioning - queryset = PromptStudioRegistry.objects.all() - serializer_class = PromptStudioRegistrySerializer - - def get_queryset(self) -> Optional[QuerySet]: - filterArgs = FilterHelper.build_filter_args( - self.request, - PromptStudioRegistryKeys.PROMPT_REGISTRY_ID, - ) - queryset = None - if filterArgs: - queryset = PromptStudioRegistry.objects.filter(**filterArgs) - - return queryset diff --git a/backend/pyproject.toml b/backend/pyproject.toml index ae2dd5187..00f609b79 100644 --- a/backend/pyproject.toml +++ b/backend/pyproject.toml @@ -32,7 +32,7 @@ dependencies = [ "python-socketio==5.9.0", # For log_events "social-auth-app-django==5.3.0", # For OAuth "social-auth-core==4.4.2", # For OAuth - "unstract-sdk~=0.54.0rc6", + "unstract-sdk~=0.54.0rc8", # ! IMPORTANT! # Indirect local dependencies usually need to be added in their own projects # as: https://pdm-project.org/latest/usage/dependency/#local-dependencies. diff --git a/backend/sample.env b/backend/sample.env index 354021577..e95b77dc9 100644 --- a/backend/sample.env +++ b/backend/sample.env @@ -82,9 +82,9 @@ REMOTE_PROMPT_STUDIO_FILE_PATH= # Structure Tool Image (Runs prompt studio exported tools) # https://hub.docker.com/r/unstract/tool-structure -STRUCTURE_TOOL_IMAGE_URL="docker:unstract/tool-structure:0.0.52" +STRUCTURE_TOOL_IMAGE_URL="docker:unstract/tool-structure:0.0.53" STRUCTURE_TOOL_IMAGE_NAME="unstract/tool-structure" -STRUCTURE_TOOL_IMAGE_TAG="0.0.52" +STRUCTURE_TOOL_IMAGE_TAG="0.0.53" # Feature Flags EVALUATION_SERVER_IP=unstract-flipt diff --git a/backend/workflow_manager/endpoint/admin.py b/backend/workflow_manager/endpoint/admin.py deleted file mode 100644 index 846f6b406..000000000 --- a/backend/workflow_manager/endpoint/admin.py +++ /dev/null @@ -1 +0,0 @@ -# Register your models here. 
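
Regarding the sample.env update above: the structure tool image runs Prompt Studio exported tools, and the registry helper removed earlier in this diff passes settings.STRUCTURE_TOOL_IMAGE_URL, settings.STRUCTURE_TOOL_IMAGE_NAME and settings.STRUCTURE_TOOL_IMAGE_TAG into the exported Tool. A minimal sketch of how such env values are typically loaded, assuming a plain os.environ read; only the variable names and defaults come from sample.env, the rest is illustrative.

import os

# Illustrative only: defaults mirror the updated values in sample.env.
STRUCTURE_TOOL_IMAGE_URL = os.environ.get(
    "STRUCTURE_TOOL_IMAGE_URL", "docker:unstract/tool-structure:0.0.53"
)
STRUCTURE_TOOL_IMAGE_NAME = os.environ.get(
    "STRUCTURE_TOOL_IMAGE_NAME", "unstract/tool-structure"
)
STRUCTURE_TOOL_IMAGE_TAG = os.environ.get("STRUCTURE_TOOL_IMAGE_TAG", "0.0.53")

The tag embedded in STRUCTURE_TOOL_IMAGE_URL and the standalone STRUCTURE_TOOL_IMAGE_TAG describe the same image, which is why both lines are bumped together here (0.0.52 to 0.0.53).
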
diff --git a/backend/workflow_manager/endpoint/apps.py b/backend/workflow_manager/endpoint/apps.py deleted file mode 100644 index e880bba6f..000000000 --- a/backend/workflow_manager/endpoint/apps.py +++ /dev/null @@ -1,6 +0,0 @@ -from django.apps import AppConfig - - -class WorkflowEndpointConfig(AppConfig): - default_auto_field = "django.db.models.BigAutoField" - name = "workflow_manager.endpoint" diff --git a/backend/workflow_manager/endpoint/base_connector.py b/backend/workflow_manager/endpoint/base_connector.py deleted file mode 100644 index 6c35910e5..000000000 --- a/backend/workflow_manager/endpoint/base_connector.py +++ /dev/null @@ -1,106 +0,0 @@ -import json -from typing import Any - -from django.conf import settings -from django.db import connection -from fsspec import AbstractFileSystem -from unstract.workflow_execution.execution_file_handler import ExecutionFileHandler -from utils.constants import Common - -from unstract.connectors.filesystems import connectors -from unstract.connectors.filesystems.unstract_file_system import UnstractFileSystem - - -class BaseConnector(ExecutionFileHandler): - """Base class for connectors providing common methods and utilities.""" - - def __init__( - self, workflow_id: str, execution_id: str, organization_id: str - ) -> None: - """Initialize the BaseConnector class. - - This class serves as a base for connectors and provides common - utilities. - """ - if not (settings.API_STORAGE_DIR and settings.WORKFLOW_DATA_DIR): - raise ValueError("Missed env API_STORAGE_DIR or WORKFLOW_DATA_DIR") - super().__init__(workflow_id, execution_id, organization_id) - # Directory path for storing execution-related files for API - self.api_storage_dir: str = self.create_execution_dir_path( - workflow_id, execution_id, organization_id, settings.API_STORAGE_DIR - ) - - def get_fsspec( - self, settings: dict[str, Any], connector_id: str - ) -> AbstractFileSystem: - """Get an fsspec file system based on the specified connector. - - Parameters: - - settings (dict): Connector-specific settings. - - connector_id (str): Identifier for the desired connector. - - Returns: - AbstractFileSystem: An fsspec file system instance. - - Raises: - KeyError: If the connector_id is not found in the connectors dictionary. - """ - return self.get_fs_connector( - settings=settings, connector_id=connector_id - ).get_fsspec_fs() - - def get_fs_connector( - self, settings: dict[str, Any], connector_id: str - ) -> UnstractFileSystem: - """Get an fs connector based specified connector settings. - - Parameters: - - settings (dict): Connector-specific settings. - - connector_id (str): Identifier for the desired connector. - - Returns: - UnstractFileSystem: An unstract fs connector instance. - """ - if connector_id not in connectors: - raise ValueError(f"Connector '{connector_id}' is not supported.") - connector = connectors[connector_id][Common.METADATA][Common.CONNECTOR] - return connector(settings) - - @classmethod - def get_json_schema(cls, file_path: str) -> dict[str, Any]: - """Load and return a JSON schema from the specified file path. - - Parameters: - - file_path (str): The path to the JSON schema file. - - Returns: - dict: The loaded JSON schema. - - Raises: - json.JSONDecodeError: If there is an issue decoding the JSON file. 
- """ - try: - with open(file_path, encoding="utf-8") as file: - schema: dict[str, Any] = json.load(file) - except OSError: - schema = {} - return schema - - @classmethod - def get_api_storage_dir_path(cls, workflow_id: str, execution_id: str) -> str: - """Get the directory path for storing api files. - - Parameters: - - workflow_id (str): Identifier for the workflow. - - execution_id (str): Identifier for the execution. - - organization_id (Optional[str]): Identifier for the organization - (default: None). - - Returns: - str: The directory path for the execution. - """ - organization_id = connection.tenant.schema_name - api_storage_dir: str = cls.create_execution_dir_path( - workflow_id, execution_id, organization_id, settings.API_STORAGE_DIR - ) - return api_storage_dir diff --git a/backend/workflow_manager/endpoint/constants.py b/backend/workflow_manager/endpoint/constants.py deleted file mode 100644 index 9a89763f6..000000000 --- a/backend/workflow_manager/endpoint/constants.py +++ /dev/null @@ -1,97 +0,0 @@ -class TableColumns: - CREATED_BY = "created_by" - CREATED_AT = "created_at" - PERMANENT_COLUMNS = ["created_by", "created_at"] - - -class DBConnectionClass: - SNOWFLAKE = "SnowflakeDB" - - -class Snowflake: - COLUMN_TYPES = [ - "VARCHAR", - "CHAR", - "CHARACTER", - "STRING", - "TEXT", - "BINARY", - "VARBINARY", - "DATE", - "DATETIME", - "TIME", - "TIMESTAMP", - "TIMESTAMP_LTZ", - "TIMESTAMP_NTZ", - "TIMESTAMP_TZ", - "BOOLEAN", - ] - - -class FileSystemConnector: - MAX_FILES = 100 - - -class WorkflowFileType: - SOURCE = "SOURCE" - INFILE = "INFILE" - METADATA_JSON = "METADATA.json" - - -class SourceKey: - FILE_EXTENSIONS = "fileExtensions" - PROCESS_SUB_DIRECTORIES = "processSubDirectories" - MAX_FILES = "maxFiles" - FOLDERS = "folders" - - -class DestinationKey: - TABLE = "table" - INCLUDE_AGENT = "includeAgent" - INCLUDE_TIMESTAMP = "includeTimestamp" - AGENT_NAME = "agentName" - COLUMN_MODE = "columnMode" - SINGLE_COLUMN_NAME = "singleColumnName" - PATH = "path" - OUTPUT_FOLDER = "outputFolder" - OVERWRITE_OUTPUT_DOCUMENT = "overwriteOutput" - FILE_PATH = "filePath" - EXECUTION_ID = "executionId" - - -class OutputJsonKey: - JSON_RESULT_KEY = "result" - - -class FileType: - PDF_DOCUMENTS = "PDF documents" - TEXT_DOCUMENTS = "Text documents" - IMAGES = "Images" - - -class FilePattern: - PDF_DOCUMENTS = ["*.pdf"] - TEXT_DOCUMENTS = ["*.txt", "*.doc", "*.docx"] - IMAGES = [ - "*.jpg", - "*.jpeg", - "*.png", - "*.gif", - "*.bmp", - "*.tif", - "*.tiff", - ] - - -class SourceConstant: - MAX_RECURSIVE_DEPTH = 10 - - -class ApiDeploymentResultStatus: - SUCCESS = "Success" - FAILED = "Failed" - - -class QueueResultStatus: - SUCCESS = "Success" - FAILED = "Failed" diff --git a/backend/workflow_manager/endpoint/database_utils.py b/backend/workflow_manager/endpoint/database_utils.py deleted file mode 100644 index 0f8a9fef4..000000000 --- a/backend/workflow_manager/endpoint/database_utils.py +++ /dev/null @@ -1,274 +0,0 @@ -import datetime -import json -import logging -import uuid -from typing import Any, Optional - -from utils.constants import Common -from workflow_manager.endpoint.constants import DBConnectionClass, TableColumns -from workflow_manager.endpoint.exceptions import UnstractDBException -from workflow_manager.workflow.enums import AgentName, ColumnModes - -from unstract.connectors.databases import connectors as db_connectors -from unstract.connectors.databases.exceptions import UnstractDBConnectorException -from unstract.connectors.databases.unstract_db import UnstractDB -from 
unstract.connectors.exceptions import ConnectorError - -logger = logging.getLogger(__name__) - - -class DatabaseUtils: - @staticmethod - def get_sql_values_for_query( - values: dict[str, Any], column_types: dict[str, str], cls_name: str - ) -> dict[str, str]: - """Making Sql Columns and Values for Query. - - Args: - values (dict[str, Any]): dictionary of columns and values - column_types (dict[str,str]): types of columns - cls (Any, optional): The database connection class (e.g., - DBConnectionClass.SNOWFLAKE) for handling database-specific - queries. - Defaults to None. - - Returns: - list[str]: _description_ - - Note: - - If `cls` is not provided or is None, the function assumes a - Default SQL database and makes values accordingly. - - If `cls` is provided and matches DBConnectionClass.SNOWFLAKE, - the function makes values using Snowflake-specific syntax. - - - Unstract creates id by default if table not exists. - If there is column 'id' in db table, it will insert - 'id' as uuid into the db table. - Else it will GET table details from INFORMATION SCHEMA and - insert into the table accordingly - """ - sql_values: dict[str, Any] = {} - for column in values: - if cls_name == DBConnectionClass.SNOWFLAKE: - col = column.lower() - type_x = column_types.get(col, "") - if type_x == "VARIANT": - values[column] = values[column].replace("'", "\\'") - sql_values[column] = f"parse_json($${values[column]}$$)" - else: - sql_values[column] = f"{values[column]}" - else: - # Default to Other SQL DBs - # TODO: Handle numeric types with no quotes - sql_values[column] = f"{values[column]}" - # If table has a column 'id', unstract inserts a unique value to it - # Oracle db has column 'ID' instead of 'id' - if any(key in column_types for key in ["id", "ID"]): - uuid_id = str(uuid.uuid4()) - sql_values["id"] = f"{uuid_id}" - return sql_values - - @staticmethod - def get_column_types( - conn_cls: Any, - table_name: str, - ) -> Any: - """Function to return connector db column and types by calling - connector table information schema. - - Args: - conn_cls (Any): DB Connection class - table_name (str): DB table-name - - Raises: - UnstractDBException: _description_ - - Returns: - Any: db column name and db column types of corresponding table - """ - try: - return conn_cls.get_information_schema(table_name=table_name) - except ConnectorError as e: - raise UnstractDBException(detail=e.message) from e - except Exception as e: - logger.error( - f"Error getting db-column-name and db-column-type " - f"for {table_name}: {str(e)}" - ) - raise - - @staticmethod - def get_columns_and_values( - column_mode_str: str, - data: Any, - file_path: str, - execution_id: str, - file_path_name: str = "file_path", - execution_id_name: str = "execution_id", - include_timestamp: bool = False, - include_agent: bool = False, - agent_name: Optional[str] = AgentName.UNSTRACT_DBWRITER.value, - single_column_name: str = "data", - ) -> dict[str, Any]: - """Generate a dictionary of columns and values based on specified - parameters. - - Args: - column_mode_str (str): The string representation of the column mode, - which determines how data is stored in the dictionary. - data (Any): The data to be stored in the dictionary. - include_timestamp (bool, optional): Whether to include the - current timestamp in the dictionary. Defaults to False. - include_agent (bool, optional): Whether to include the agent's name - in the dictionary. Defaults to False. - agent_name (str, optional): The name of the agent when include_agent - is true. 
Defaults to AgentName.UNSTRACT_DBWRITER. - single_column_name (str, optional): The name of the single column - when using 'WRITE_JSON_TO_A_SINGLE_COLUMN' mode. - Defaults to "data". - - Returns: - dict: A dictionary containing columns and values based on - the specified parameters. - """ - - values: dict[str, Any] = {} - try: - column_mode = ColumnModes(column_mode_str) - except ValueError: - # Handle the case where the string is not a valid enum value - column_mode = ColumnModes.WRITE_JSON_TO_A_SINGLE_COLUMN - - if include_agent and agent_name: - values[TableColumns.CREATED_BY] = agent_name - - if include_timestamp: - values[TableColumns.CREATED_AT] = datetime.datetime.now() - - if column_mode == ColumnModes.WRITE_JSON_TO_A_SINGLE_COLUMN: - if isinstance(data, str): - values[single_column_name] = data - else: - values[single_column_name] = json.dumps(data) - if column_mode == ColumnModes.SPLIT_JSON_INTO_COLUMNS: - if isinstance(data, dict): - values.update(data) - elif isinstance(data, str): - values[single_column_name] = data - else: - values[single_column_name] = json.dumps(data) - values[file_path_name] = file_path - values[execution_id_name] = execution_id - return values - - @staticmethod - def get_sql_query_data( - conn_cls: Any, - table_name: str, - values: dict[str, Any], - ) -> dict[str, Any]: - """Generate SQL columns and values for an insert query based on the - provided values and table schema. - - Args: - connector_cls: DB connection class - table_name (str): The name of the target table for the insert query. - values (dict[str, Any]): A dictionary containing column-value pairs - for the insert query. - - Returns: - list[str]: A list of SQL values suitable for use in an insert query. - - Note: - - This function determines the database type based on the - `engine` parameter. - - If the database is Snowflake (DBConnectionClass.SNOWFLAKE), - it handles Snowflake-specific SQL generation. - - For other SQL databases, it uses default SQL generation - based on column types. - """ - cls_name = conn_cls.__class__.__name__ - column_types: dict[str, str] = DatabaseUtils.get_column_types( - conn_cls=conn_cls, table_name=table_name - ) - sql_columns_and_values = DatabaseUtils.get_sql_values_for_query( - values=values, - column_types=column_types, - cls_name=cls_name, - ) - return sql_columns_and_values - - @staticmethod - def execute_write_query( - db_class: UnstractDB, - engine: Any, - table_name: str, - sql_keys: list[str], - sql_values: list[str], - ) -> None: - """Execute Insert Query. - - Args: - engine (Any): _description_ - table_name (str): table name - sql_keys (list[str]): columns - sql_values (list[str]): values - Notes: - - Snowflake does not support INSERT INTO ... VALUES ... - syntax when VARIANT columns are present (JSON). - So we need to use INSERT INTO ... SELECT ... syntax - - sql values can contain data with single quote. 
It needs to - """ - sql = db_class.get_sql_insert_query(table_name=table_name, sql_keys=sql_keys) - - logger.debug(f"inserting into table {table_name} with: {sql} query") - logger.debug(f"sql_values: {sql_values}") - - try: - db_class.execute_query( - engine=engine, - sql_query=sql, - sql_values=sql_values, - table_name=table_name, - sql_keys=sql_keys, - ) - except UnstractDBConnectorException as e: - raise UnstractDBException(detail=e.detail) from e - logger.debug(f"sucessfully inserted into table {table_name} with: {sql} query") - - @staticmethod - def get_db_class( - connector_id: str, connector_settings: dict[str, Any] - ) -> UnstractDB: - connector = db_connectors[connector_id][Common.METADATA][Common.CONNECTOR] - connector_class: UnstractDB = connector(connector_settings) - return connector_class - - @staticmethod - def create_table_if_not_exists( - db_class: UnstractDB, - engine: Any, - table_name: str, - database_entry: dict[str, Any], - ) -> None: - """Creates table if not exists. - - Args: - class_name (UnstractDB): Type of Unstract DB connector - table_name (str): _description_ - database_entry (dict[str, Any]): _description_ - - Raises: - e: _description_ - """ - sql = db_class.create_table_query( - table=table_name, database_entry=database_entry - ) - logger.debug(f"creating table {table_name} with: {sql} query") - try: - db_class.execute_query( - engine=engine, sql_query=sql, sql_values=None, table_name=table_name - ) - except UnstractDBConnectorException as e: - raise UnstractDBException(detail=e.detail) from e - logger.debug(f"successfully created table {table_name} with: {sql} query") diff --git a/backend/workflow_manager/endpoint/destination.py b/backend/workflow_manager/endpoint/destination.py deleted file mode 100644 index 4de330f40..000000000 --- a/backend/workflow_manager/endpoint/destination.py +++ /dev/null @@ -1,581 +0,0 @@ -import ast -import base64 -import json -import logging -import os -from typing import Any, Optional, Union - -import fsspec -import magic -from connector.models import ConnectorInstance -from django.db import connection -from fsspec.implementations.local import LocalFileSystem -from unstract.sdk.constants import ToolExecKey -from unstract.workflow_execution.constants import ToolOutputType -from workflow_manager.endpoint.base_connector import BaseConnector -from workflow_manager.endpoint.constants import ( - ApiDeploymentResultStatus, - DestinationKey, - QueueResultStatus, - WorkflowFileType, -) -from workflow_manager.endpoint.database_utils import DatabaseUtils -from workflow_manager.endpoint.dto import FileHash -from workflow_manager.endpoint.exceptions import ( - DestinationConnectorNotConfigured, - InvalidDestinationConnectionType, - InvalidToolOutputType, - MissingDestinationConnectionType, - ToolOutputTypeMismatch, -) -from workflow_manager.endpoint.models import WorkflowEndpoint -from workflow_manager.endpoint.queue_utils import QueueResult, QueueUtils -from workflow_manager.workflow.enums import ExecutionStatus -from workflow_manager.workflow.execution import WorkflowExecutionServiceHelper -from workflow_manager.workflow.file_history_helper import FileHistoryHelper -from workflow_manager.workflow.models.file_history import FileHistory -from workflow_manager.workflow.models.workflow import Workflow - -from backend.exceptions import UnstractFSException -from unstract.connectors.exceptions import ConnectorError - -logger = logging.getLogger(__name__) - - -class DestinationConnector(BaseConnector): - """A class representing a Destination 
connector for a workflow. - - This class extends the BaseConnector class and provides methods for - interacting with different types of destination connectors, - such as file system connectors and API connectors and DB connectors. - - Attributes: - workflow (Workflow): The workflow associated with - the destination connector. - """ - - def __init__( - self, - workflow: Workflow, - execution_id: str, - execution_service: Optional[WorkflowExecutionServiceHelper] = None, - ) -> None: - """Initialize a DestinationConnector object. - - Args: - workflow (Workflow): _description_ - """ - organization_id = connection.tenant.schema_name - super().__init__(workflow.id, execution_id, organization_id) - self.endpoint = self._get_endpoint_for_workflow(workflow=workflow) - self.source_endpoint = self._get_source_endpoint_for_workflow(workflow=workflow) - self.execution_id = execution_id - self.api_results: list[dict[str, Any]] = [] - self.queue_results: list[dict[str, Any]] = [] - self.execution_service = execution_service - - def _get_endpoint_for_workflow( - self, - workflow: Workflow, - ) -> WorkflowEndpoint: - """Get WorkflowEndpoint instance. - - Args: - workflow (Workflow): Workflow associated with the - destination connector. - - Returns: - WorkflowEndpoint: WorkflowEndpoint instance. - """ - endpoint: WorkflowEndpoint = WorkflowEndpoint.objects.get( - workflow=workflow, - endpoint_type=WorkflowEndpoint.EndpointType.DESTINATION, - ) - if endpoint.connector_instance: - endpoint.connector_instance.connector_metadata = ( - endpoint.connector_instance.metadata - ) - return endpoint - - def _get_source_endpoint_for_workflow( - self, - workflow: Workflow, - ) -> WorkflowEndpoint: - """Get WorkflowEndpoint instance. - - Args: - workflow (Workflow): Workflow associated with the - destination connector. - - Returns: - WorkflowEndpoint: WorkflowEndpoint instance. 
- """ - endpoint: WorkflowEndpoint = WorkflowEndpoint.objects.get( - workflow=workflow, - endpoint_type=WorkflowEndpoint.EndpointType.SOURCE, - ) - if endpoint.connector_instance: - endpoint.connector_instance.connector_metadata = ( - endpoint.connector_instance.metadata - ) - return endpoint - - def validate(self) -> None: - connection_type = self.endpoint.connection_type - connector: ConnectorInstance = self.endpoint.connector_instance - if connection_type is None: - raise MissingDestinationConnectionType() - if connection_type not in WorkflowEndpoint.ConnectionType.values: - raise InvalidDestinationConnectionType() - if ( - connection_type != WorkflowEndpoint.ConnectionType.API - and connection_type != WorkflowEndpoint.ConnectionType.MANUALREVIEW - and connector is None - ): - raise DestinationConnectorNotConfigured() - - def _push_data_to_queue( - self, - file_name: str, - workflow: Workflow, - input_file_path: str, - ) -> None: - result = self.get_result() - meta_data = self.get_metadata() - self._push_to_queue( - file_name=file_name, - workflow=workflow, - result=result, - input_file_path=input_file_path, - meta_data=meta_data, - ) - - def handle_output( - self, - file_name: str, - file_hash: FileHash, - workflow: Workflow, - input_file_path: str, - error: Optional[str] = None, - use_file_history: bool = True, - ) -> None: - """Handle the output based on the connection type.""" - connection_type = self.endpoint.connection_type - result: Optional[str] = None - metadata: Optional[str] = None - if error: - if connection_type == WorkflowEndpoint.ConnectionType.API: - self._handle_api_result(file_name=file_name, error=error, result=result) - return - - file_history = None - if use_file_history: - file_history = FileHistoryHelper.get_file_history( - workflow=workflow, cache_key=file_hash.file_hash - ) - if connection_type == WorkflowEndpoint.ConnectionType.FILESYSTEM: - self.copy_output_to_output_directory() - elif connection_type == WorkflowEndpoint.ConnectionType.DATABASE: - if ( - file_hash.file_destination - == WorkflowEndpoint.ConnectionType.MANUALREVIEW - ): - self._push_data_to_queue(file_name, workflow, input_file_path) - else: - self.insert_into_db(input_file_path=input_file_path) - elif connection_type == WorkflowEndpoint.ConnectionType.API: - result = self.get_result(file_history) - exec_metadata = self.get_metadata(file_history) - self._handle_api_result( - file_name=file_name, error=error, result=result, metadata=exec_metadata - ) - elif connection_type == WorkflowEndpoint.ConnectionType.MANUALREVIEW: - self._push_data_to_queue(file_name, workflow, input_file_path) - if self.execution_service: - self.execution_service.publish_log( - message=f"File '{file_name}' processed successfully" - ) - - if use_file_history and not file_history: - FileHistoryHelper.create_file_history( - cache_key=file_hash.file_hash, - workflow=workflow, - status=ExecutionStatus.COMPLETED, - result=result, - metadata=metadata, - file_name=file_name, - ) - - def copy_output_to_output_directory(self) -> None: - """Copy output to the destination directory.""" - connector: ConnectorInstance = self.endpoint.connector_instance - connector_settings: dict[str, Any] = connector.connector_metadata - destination_configurations: dict[str, Any] = self.endpoint.configuration - root_path = connector_settings.get(DestinationKey.PATH, "") - - output_directory = str( - destination_configurations.get(DestinationKey.OUTPUT_FOLDER, "/") - ) - destination_fs = self.get_fs_connector( - settings=connector_settings, 
connector_id=connector.connector_id - ) - output_directory = destination_fs.get_connector_root_dir( - input_dir=output_directory, root_path=root_path - ) - logger.debug(f"destination output directory {output_directory}") - destination_volume_path = os.path.join( - self.execution_dir, ToolExecKey.OUTPUT_DIR - ) - - try: - destination_fs.create_dir_if_not_exists(input_dir=output_directory) - - # Traverse local directory and create the same structure in the - # output_directory - for root, dirs, files in os.walk(destination_volume_path): - for dir_name in dirs: - current_dir = os.path.join( - output_directory, - os.path.relpath(root, destination_volume_path), - dir_name, - ) - destination_fs.create_dir_if_not_exists(input_dir=current_dir) - - for file_name in files: - source_path = os.path.join(root, file_name) - destination_path = os.path.join( - output_directory, - os.path.relpath(root, destination_volume_path), - file_name, - ) - destination_fs.upload_file_to_storage( - source_path=source_path, destination_path=destination_path - ) - except ConnectorError as e: - raise UnstractFSException(core_err=e) from e - - def insert_into_db(self, input_file_path: str) -> None: - """Insert data into the database.""" - connector_instance: ConnectorInstance = self.endpoint.connector_instance - connector_settings: dict[str, Any] = connector_instance.metadata - destination_configurations: dict[str, Any] = self.endpoint.configuration - table_name: str = str(destination_configurations.get(DestinationKey.TABLE)) - include_agent: bool = bool( - destination_configurations.get(DestinationKey.INCLUDE_AGENT, False) - ) - include_timestamp = bool( - destination_configurations.get(DestinationKey.INCLUDE_TIMESTAMP, False) - ) - agent_name = str(destination_configurations.get(DestinationKey.AGENT_NAME)) - column_mode = str(destination_configurations.get(DestinationKey.COLUMN_MODE)) - single_column_name = str( - destination_configurations.get(DestinationKey.SINGLE_COLUMN_NAME, "data") - ) - file_path_name = str( - destination_configurations.get(DestinationKey.FILE_PATH, "file_path") - ) - execution_id_name = str( - destination_configurations.get(DestinationKey.EXECUTION_ID, "execution_id") - ) - data = self.get_result() - # If data is None, don't execute CREATE or INSERT query - if not data: - return - # Remove metadata from result - # Tool text-extractor returns data in the form of string. - # Don't pop out metadata in this case. 
- if isinstance(data, dict): - data.pop("metadata", None) - values = DatabaseUtils.get_columns_and_values( - column_mode_str=column_mode, - data=data, - include_timestamp=include_timestamp, - include_agent=include_agent, - agent_name=agent_name, - single_column_name=single_column_name, - file_path_name=file_path_name, - execution_id_name=execution_id_name, - file_path=input_file_path, - execution_id=self.execution_id, - ) - db_class = DatabaseUtils.get_db_class( - connector_id=connector_instance.connector_id, - connector_settings=connector_settings, - ) - engine = db_class.get_engine() - DatabaseUtils.create_table_if_not_exists( - db_class=db_class, - engine=engine, - table_name=table_name, - database_entry=values, - ) - sql_columns_and_values = DatabaseUtils.get_sql_query_data( - conn_cls=db_class, - table_name=table_name, - values=values, - ) - DatabaseUtils.execute_write_query( - db_class=db_class, - engine=engine, - table_name=table_name, - sql_keys=list(sql_columns_and_values.keys()), - sql_values=list(sql_columns_and_values.values()), - ) - - def _handle_api_result( - self, - file_name: str, - error: Optional[str] = None, - result: Optional[str] = None, - metadata: Optional[dict[str, Any]] = None, - ) -> None: - """Handle the API result. - - This method is responsible for handling the API result. - It appends the file name and result to the 'results' list for API resp. - - Args: - file_name (str): The name of the file. - result (Optional[str], optional): The result of the API call. - Defaults to None. - - Returns: - None - """ - api_result: dict[str, Any] = {"file": file_name} - if error: - api_result.update( - {"status": ApiDeploymentResultStatus.FAILED, "error": error} - ) - else: - if result: - api_result.update( - { - "status": ApiDeploymentResultStatus.SUCCESS, - "result": result, - "metadata": metadata, - } - ) - else: - api_result.update( - {"status": ApiDeploymentResultStatus.SUCCESS, "result": ""} - ) - self.api_results.append(api_result) - - def parse_string(self, original_string: str) -> Any: - """Parse the given string, attempting to evaluate it as a Python - literal. - ex: a json string to dict method - Parameters: - - original_string (str): The input string to be parsed. - - Returns: - - Any: The parsed result. If the string can be evaluated as a Python - literal, the result of the evaluation is returned. - If not, the original string is returned unchanged. - - Note: - This function uses `ast.literal_eval` to attempt parsing the string as a - Python literal. If parsing fails due to a SyntaxError or ValueError, - the original string is returned. - - Example: - >>> parser.parse_string("42") - 42 - >>> parser.parse_string("[1, 2, 3]") - [1, 2, 3] - >>> parser.parse_string("Hello, World!") - 'Hello, World!' - """ - try: - # Try to evaluate as a Python literal - python_literal = ast.literal_eval(original_string) - return python_literal - except (SyntaxError, ValueError): - # If evaluating as a Python literal fails, - # assume it's a plain string - return original_string - - def get_result(self, file_history: Optional[FileHistory] = None) -> Optional[Any]: - """Get result data from the output file. - - Returns: - Union[dict[str, Any], str]: Result data. 
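# Illustrative sketch (not part of the deleted module): the per-file entries
# that _handle_api_result() appends to self.api_results, with hypothetical
# values. The status strings below stand in for ApiDeploymentResultStatus.SUCCESS
# and FAILED, whose concrete values are defined in constants.py and not shown here.
ok_entry = {
    "file": "invoice.pdf",
    "status": "<ApiDeploymentResultStatus.SUCCESS>",
    "result": '{"invoice_no": "INV-42"}',
    "metadata": {"whisper-hash": "hypothetical-hash"},
}
failed_entry = {
    "file": "broken.pdf",
    "status": "<ApiDeploymentResultStatus.FAILED>",
    "error": "hypothetical error message",
}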
- """ - if file_history and file_history.result: - return self.parse_string(file_history.result) - output_file = os.path.join(self.execution_dir, WorkflowFileType.INFILE) - metadata: dict[str, Any] = self.get_workflow_metadata() - output_type = self.get_output_type(metadata) - result: Union[dict[str, Any], str] = "" - try: - # TODO: SDK handles validation; consider removing here. - mime = magic.Magic() - file_type = mime.from_file(output_file) - if output_type == ToolOutputType.JSON: - if "JSON" not in file_type: - logger.error(f"Output type json mismatched {file_type}") - raise ToolOutputTypeMismatch() - with open(output_file) as file: - result = json.load(file) - elif output_type == ToolOutputType.TXT: - if "JSON" in file_type: - logger.error(f"Output type txt mismatched {file_type}") - raise ToolOutputTypeMismatch() - with open(output_file) as file: - result = file.read() - result = result.encode("utf-8").decode("unicode-escape") - else: - raise InvalidToolOutputType() - except (FileNotFoundError, json.JSONDecodeError) as err: - logger.error(f"Error while getting result {err}") - return result - - def get_metadata( - self, file_history: Optional[FileHistory] = None - ) -> Optional[dict[str, Any]]: - """Get meta_data from the output file. - - Returns: - Union[dict[str, Any], str]: Meta data. - """ - if file_history and file_history.meta_data: - return self.parse_string(file_history.meta_data) - metadata: dict[str, Any] = self.get_workflow_metadata() - - return metadata - - def delete_execution_directory(self) -> None: - """Delete the execution directory. - - Returns: - None - """ - fs: LocalFileSystem = fsspec.filesystem("file") - fs.rm(self.execution_dir, recursive=True) - self.delete_api_storage_dir(self.workflow_id, self.execution_id) - - @classmethod - def delete_api_storage_dir(cls, workflow_id: str, execution_id: str) -> None: - """Delete the api storage path. - - Returns: - None - """ - api_storage_dir = cls.get_api_storage_dir_path( - workflow_id=workflow_id, execution_id=execution_id - ) - fs: LocalFileSystem = fsspec.filesystem("file") - fs.rm(api_storage_dir, recursive=True) - - @classmethod - def create_endpoint_for_workflow( - cls, - workflow: Workflow, - ) -> None: - """Create a workflow endpoint for the destination. - - Args: - workflow (Workflow): Workflow for which the endpoint is created. - """ - endpoint = WorkflowEndpoint( - workflow=workflow, - endpoint_type=WorkflowEndpoint.EndpointType.DESTINATION, - ) - endpoint.save() - - @classmethod - def get_json_schema_for_database(cls) -> dict[str, Any]: - """Get JSON schema for the database. - - Returns: - dict[str, Any]: JSON schema for the database. - """ - schema_path = os.path.join( - os.path.dirname(__file__), "static", "dest", "db.json" - ) - return cls.get_json_schema(file_path=schema_path) - - @classmethod - def get_json_schema_for_file_system(cls) -> dict[str, Any]: - """Get JSON schema for the file system. - - Returns: - dict[str, Any]: JSON schema for the file system. - """ - schema_path = os.path.join( - os.path.dirname(__file__), "static", "dest", "file.json" - ) - return cls.get_json_schema(file_path=schema_path) - - @classmethod - def get_json_schema_for_api(cls) -> dict[str, Any]: - """Json schema for api. 
- - Returns: - dict[str, Any]: _description_ - """ - schema_path = os.path.join( - os.path.dirname(__file__), "static", "dest", "api.json" - ) - return cls.get_json_schema(file_path=schema_path) - - def _push_to_queue( - self, - file_name: str, - workflow: Workflow, - result: Optional[str] = None, - input_file_path: Optional[str] = None, - meta_data: Optional[dict[str, Any]] = None, - ) -> None: - """Handle the Manual Review QUEUE result. - - This method is responsible for pushing the input file and result to - review queue. - Args: - file_name (str): The name of the file. - workflow (Workflow): The workflow object containing - details about the workflow. - result (Optional[str], optional): The result of the API call. - Defaults to None. - input_file_path (Optional[str], optional): - The path to the input file. - Defaults to None. - meta_data (Optional[dict[str, Any]], optional): - A dictionary containing additional - metadata related to the file. Defaults to None. - - Returns: - None - """ - if not result: - return - connector: ConnectorInstance = self.source_endpoint.connector_instance - connector_settings: dict[str, Any] = connector.connector_metadata - - source_fs = self.get_fsspec( - settings=connector_settings, connector_id=connector.connector_id - ) - with source_fs.open(input_file_path, "rb") as remote_file: - whisper_hash = None - file_content = remote_file.read() - # Convert file content to a base64 encoded string - file_content_base64 = base64.b64encode(file_content).decode("utf-8") - q_name = f"review_queue_{self.organization_id}_{workflow.id}" - if meta_data: - whisper_hash = meta_data.get("whisper-hash") - else: - whisper_hash = None - queue_result = QueueResult( - file=file_name, - status=QueueResultStatus.SUCCESS, - result=result, - workflow_id=str(self.workflow_id), - file_content=file_content_base64, - whisper_hash=whisper_hash, - ).to_dict() - # Convert the result dictionary to a JSON string - queue_result_json = json.dumps(queue_result) - conn = QueueUtils.get_queue_inst() - # Enqueue the JSON string - conn.enqueue(queue_name=q_name, message=queue_result_json) diff --git a/backend/workflow_manager/endpoint/dto.py b/backend/workflow_manager/endpoint/dto.py deleted file mode 100644 index 0e7aa6fd9..000000000 --- a/backend/workflow_manager/endpoint/dto.py +++ /dev/null @@ -1,36 +0,0 @@ -import json -from dataclasses import dataclass -from typing import Any, Optional - - -@dataclass -class FileHash: - file_path: str - file_hash: str - file_name: str - source_connection_type: str - file_destination: Optional[tuple[str, str]] = ( - None # To which destination this file wants to go for MRQ percentage - ) - is_executed: bool = False - - def to_json(self) -> dict[str, Any]: - return { - "file_path": self.file_path, - "file_hash": self.file_hash, - "file_name": self.file_name, - "source_connection_type": self.source_connection_type, - "file_destination": self.file_destination, - "is_executed": self.is_executed, - } - - @staticmethod - def from_json(json_str_or_dict: Any) -> "FileHash": - """Deserialize a JSON string or dictionary to a FileHash instance.""" - if isinstance(json_str_or_dict, dict): - # If already a dictionary, assume it's in the right format - data = json_str_or_dict - else: - # Otherwise, assume it's a JSON string - data = json.loads(json_str_or_dict) - return FileHash(**data) diff --git a/backend/workflow_manager/endpoint/endpoint_utils.py b/backend/workflow_manager/endpoint/endpoint_utils.py deleted file mode 100644 index 467ddb7e6..000000000 --- 
a/backend/workflow_manager/endpoint/endpoint_utils.py +++ /dev/null @@ -1,30 +0,0 @@ -from workflow_manager.endpoint.destination import DestinationConnector -from workflow_manager.endpoint.models import WorkflowEndpoint -from workflow_manager.endpoint.source import SourceConnector -from workflow_manager.workflow.models.workflow import Workflow -from workflow_manager.workflow.workflow_helper import WorkflowHelper - - -class WorkflowEndpointUtils: - @staticmethod - def create_endpoints_for_workflow(workflow: Workflow) -> None: - """Create endpoints for a given workflow. This method creates both - source and destination endpoints for the specified workflow. - - Parameters: - workflow (Workflow): The workflow for which - the endpoints need to be created. - - Returns: - None - """ - SourceConnector.create_endpoint_for_workflow(workflow) - DestinationConnector.create_endpoint_for_workflow(workflow) - - @staticmethod - def get_endpoints_for_workflow(workflow_id: str) -> list[WorkflowEndpoint]: - workflow = WorkflowHelper.get_workflow_by_id(workflow_id) - endpoints: list[WorkflowEndpoint] = WorkflowEndpoint.objects.filter( - workflow=workflow - ) - return endpoints diff --git a/backend/workflow_manager/endpoint/exceptions.py b/backend/workflow_manager/endpoint/exceptions.py deleted file mode 100644 index 9a1b03488..000000000 --- a/backend/workflow_manager/endpoint/exceptions.py +++ /dev/null @@ -1,107 +0,0 @@ -from typing import Optional - -from rest_framework.exceptions import APIException - - -class InvalidInputDirectory(APIException): - status_code = 400 - default_detail = "The provided path is not a valid directory." - - def __init__( - self, - dir: Optional[str] = None, - detail: Optional[str] = None, - code: Optional[str] = None, - ): - if dir: - detail = self.default_detail.replace("path", f"path '{dir}'") - super().__init__(detail, code) - - -class InvalidSourceConnectionType(APIException): - status_code = 400 - default_detail = "The provided source connection type is invalid." - - -class InvalidDestinationConnectionType(APIException): - status_code = 400 - default_detail = "The provided destination connection type is invalid." - - -class MissingSourceConnectionType(APIException): - status_code = 400 - default_detail = "The source connection type is missing." - - -class MissingDestinationConnectionType(APIException): - status_code = 400 - default_detail = "The destination connection type is missing." - - -class SourceConnectorNotConfigured(APIException): - status_code = 400 - default_detail = "The source connector is not configured" - - -class DestinationConnectorNotConfigured(APIException): - status_code = 400 - default_detail = "The destination connector is not configured" - - -class FileHashNotFound(APIException): - status_code = 500 - default_detail = "Internal server error: File hash not found." - - -class FileHashMismatched(APIException): - status_code = 400 - default_detail = ( - "The file's hash does not match the expected value. " - "The file may have been altered." - ) - - -class ToolMetadataNotFound(APIException): - status_code = 500 - default_detail = "Internal server error: Tool metadata not found." 
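# Illustrative sketch (not part of the deleted module): InvalidInputDirectory
# interpolates the offending directory into its default detail via
# str.replace(); the resulting 400 message looks like this for a hypothetical
# directory:
default_detail = "The provided path is not a valid directory."
bad_dir = "/mnt/input"  # hypothetical
print(default_detail.replace("path", f"path '{bad_dir}'"))
# -> The provided path '/mnt/input' is not a valid directory.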
- - -class OrganizationIdNotFound(APIException): - status_code = 404 - default_detail = "The organization ID could not be found" - - -class InvalidToolOutputType(APIException): - status_code = 500 - default_detail = "Invalid output type is returned from tool" - - -class ToolOutputTypeMismatch(APIException): - status_code = 400 - default_detail = ( - "The data type of the tool's output does not match the expected type." - ) - - -class BigQueryTableNotFound(APIException): - status_code = 400 - default_detail = ( - "Please enter correct correct bigquery table in the form " - "{table}.{schema}.{database}." - ) - - -class UnstractDBException(APIException): - default_detail = "Error creating/inserting to database. " - - def __init__(self, detail: str = default_detail) -> None: - status_code = 500 - super().__init__(detail=detail, code=status_code) - - -class UnstractQueueException(APIException): - default_detail = "Error creating/inserting to Queue. " - - def __init__(self, detail: str = default_detail) -> None: - status_code = 500 - super().__init__(detail=detail, code=status_code) diff --git a/backend/workflow_manager/endpoint/migrations/0001_initial.py b/backend/workflow_manager/endpoint/migrations/0001_initial.py deleted file mode 100644 index 2f714ce6b..000000000 --- a/backend/workflow_manager/endpoint/migrations/0001_initial.py +++ /dev/null @@ -1,88 +0,0 @@ -# Generated by Django 4.2.1 on 2024-01-23 11:18 - -import uuid - -import django.db.models.deletion -from django.db import migrations, models - - -class Migration(migrations.Migration): - initial = True - - dependencies = [ - ("connector", "0001_initial"), - ("workflow", "0001_initial"), - ] - - operations = [ - migrations.CreateModel( - name="WorkflowEndpoint", - fields=[ - ("created_at", models.DateTimeField(auto_now_add=True)), - ("modified_at", models.DateTimeField(auto_now=True)), - ( - "id", - models.UUIDField( - default=uuid.uuid4, - editable=False, - primary_key=True, - serialize=False, - ), - ), - ( - "endpoint_type", - models.CharField( - choices=[ - ("SOURCE", "Source connector"), - ("DESTINATION", "Destination Connector"), - ], - db_comment="Endpoint type (source or destination)", - editable=False, - ), - ), - ( - "connection_type", - models.CharField( - blank=True, - choices=[ - ("FILESYSTEM", "FileSystem connector"), - ("DATABASE", "Database Connector"), - ("API", "API Connector"), - ], - db_comment="Connection type (Filesystem, Database or API)", - ), - ), - ( - "configuration", - models.JSONField( - blank=True, - db_comment="Configuration in JSON format", - null=True, - ), - ), - ( - "connector_instance", - models.ForeignKey( - db_comment="Foreign key from ConnectorInstance model", - null=True, - on_delete=django.db.models.deletion.CASCADE, - to="connector.connectorinstance", - ), - ), - ( - "workflow", - models.ForeignKey( - db_comment="Foreign key from Workflow model", - editable=False, - on_delete=django.db.models.deletion.CASCADE, - to="workflow.workflow", - ), - ), - ], - options={ - "verbose_name": "Workflow Endpoint", - "verbose_name_plural": "Workflow Endpoints", - "db_table": "workflow_endpoints", - }, - ), - ] diff --git a/backend/workflow_manager/endpoint/migrations/0002_alter_workflowendpoint_connection_type.py b/backend/workflow_manager/endpoint/migrations/0002_alter_workflowendpoint_connection_type.py deleted file mode 100644 index d0e56e146..000000000 --- a/backend/workflow_manager/endpoint/migrations/0002_alter_workflowendpoint_connection_type.py +++ /dev/null @@ -1,27 +0,0 @@ -# Generated by Django 
4.2.1 on 2024-06-06 06:26 - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("endpoint", "0001_initial"), - ] - - operations = [ - migrations.AlterField( - model_name="workflowendpoint", - name="connection_type", - field=models.CharField( - blank=True, - choices=[ - ("FILESYSTEM", "FileSystem connector"), - ("DATABASE", "Database Connector"), - ("API", "API Connector"), - ("APPDEPLOYMENT", "App Deployment"), - ], - db_comment="Connection type (Filesystem, Database or API)", - ), - ), - ] diff --git a/backend/workflow_manager/endpoint/migrations/0003_alter_workflowendpoint_connection_type.py b/backend/workflow_manager/endpoint/migrations/0003_alter_workflowendpoint_connection_type.py deleted file mode 100644 index c2da4a302..000000000 --- a/backend/workflow_manager/endpoint/migrations/0003_alter_workflowendpoint_connection_type.py +++ /dev/null @@ -1,28 +0,0 @@ -# Generated by Django 4.2.1 on 2024-07-04 05:44 - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("endpoint", "0002_alter_workflowendpoint_connection_type"), - ] - - operations = [ - migrations.AlterField( - model_name="workflowendpoint", - name="connection_type", - field=models.CharField( - blank=True, - choices=[ - ("FILESYSTEM", "FileSystem connector"), - ("DATABASE", "Database Connector"), - ("API", "API Connector"), - ("APPDEPLOYMENT", "App Deployment"), - ("MANUALREVIEW", "Manual Review Queue Connector"), - ], - db_comment="Connection type (Filesystem, Database, API or Manualreview)", - ), - ), - ] diff --git a/backend/workflow_manager/endpoint/migrations/0004_allow_multi_source_inputs_for_fs.py b/backend/workflow_manager/endpoint/migrations/0004_allow_multi_source_inputs_for_fs.py deleted file mode 100644 index 53c47f62c..000000000 --- a/backend/workflow_manager/endpoint/migrations/0004_allow_multi_source_inputs_for_fs.py +++ /dev/null @@ -1,93 +0,0 @@ -# Generated by Django 4.2.1 on 2024-08-21 02:56 - -from typing import Any, Optional - -from django.db import migrations - - -class Migration(migrations.Migration): - - dependencies = [ - ("endpoint", "0003_alter_workflowendpoint_connection_type"), - ] - - def migrate_to_support_multiple_folder_input(apps: Any, schema_editor: Any) -> None: - """Migrates the data for multiple folders as source input. - - Converts a string field `rootFolder` in `configuration` JSON to - accept a list of values. Empty strings are converted into '/' to - handle reads from the root of the file system. - - Parameters: - apps (Any): The registry of installed applications. - schema_editor (Any): The schema editor for the database operation. - - Returns: - None: This method does not return anything. 
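# Illustrative sketch (not part of the deleted migration): effect of the
# forward data migration on a SOURCE endpoint's configuration, shown with a
# hypothetical config dict. The legacy `rootFolder` string becomes a `folders`
# list, and an empty value maps to ["/"] so reads start from the root.
before = {"rootFolder": "", "fileExtensions": ["PDF documents"]}
root_folder = before.get("rootFolder", "")
after = {**before, "folders": [root_folder] if root_folder else ["/"]}
assert after["folders"] == ["/"]
# `rootFolder` itself is kept; the reverse migration writes folders[0] back to it.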
- """ - WorkflowEndpoint = apps.get_model("endpoint", "WorkflowEndpoint") - - endpoints_to_update = WorkflowEndpoint.objects.filter(endpoint_type="SOURCE") - for endpoint in endpoints_to_update: - configuration: Optional[dict[str, Any]] = endpoint.configuration - # Skip in case of partially initialized sources - if not configuration: - continue - root_folder = configuration.get("rootFolder", "") - - # Convert rootFolder to folders list - if not root_folder: - folders = ["/"] - else: - folders = [root_folder] - - # Update the configuration JSON - configuration["folders"] = folders - - # Save the updated configuration back to the model - endpoint.configuration = configuration - endpoint.save() - - def migrate_to_support_single_folder_input(apps: Any, schema_editor: Any) -> None: - """Reverses the migration by converting folders list back to a string. - - Takes the first element of the `folders` list and reassigns it to - `rootFolder`. If the list is empty, assigns an empty string to `rootFolder`. - - Parameters: - apps (Any): The registry of installed applications. - schema_editor (Any): The schema editor for the database operation. - - Returns: - None: This method does not return anything. - """ - WorkflowEndpoint = apps.get_model("endpoint", "WorkflowEndpoint") - - endpoints_to_update = WorkflowEndpoint.objects.filter(endpoint_type="SOURCE") - - for endpoint in endpoints_to_update: - configuration: Optional[dict[str, Any]] = endpoint.configuration - # Skip in case of partially initialized sources - if not configuration: - continue - folders = configuration.get("folders", []) - - # Convert the folders list back to a single rootFolder string - if folders: - root_folder = folders[0] - else: - root_folder = "" - - # Update the configuration JSON - configuration["rootFolder"] = root_folder - - # Save the updated configuration back to the model - endpoint.configuration = configuration - endpoint.save() - - operations = [ - migrations.RunPython( - migrate_to_support_multiple_folder_input, - reverse_code=migrate_to_support_single_folder_input, - ), - ] diff --git a/backend/workflow_manager/endpoint/migrations/__init__.py b/backend/workflow_manager/endpoint/migrations/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/backend/workflow_manager/endpoint/models.py b/backend/workflow_manager/endpoint/models.py deleted file mode 100644 index f49ca1474..000000000 --- a/backend/workflow_manager/endpoint/models.py +++ /dev/null @@ -1,53 +0,0 @@ -import uuid - -from connector.models import ConnectorInstance -from django.db import models -from utils.models.base_model import BaseModel -from workflow_manager.workflow.models.workflow import Workflow - - -class WorkflowEndpoint(BaseModel): - class EndpointType(models.TextChoices): - SOURCE = "SOURCE", "Source connector" - DESTINATION = "DESTINATION", "Destination Connector" - - class ConnectionType(models.TextChoices): - FILESYSTEM = "FILESYSTEM", "FileSystem connector" - DATABASE = "DATABASE", "Database Connector" - API = "API", "API Connector" - APPDEPLOYMENT = "APPDEPLOYMENT", "App Deployment" - MANUALREVIEW = "MANUALREVIEW", "Manual Review Queue Connector" - - id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False) - workflow = models.ForeignKey( - Workflow, - on_delete=models.CASCADE, - db_index=True, - editable=False, - db_comment="Foreign key from Workflow model", - ) - endpoint_type = models.CharField( - choices=EndpointType.choices, - editable=False, - db_comment="Endpoint type (source or destination)", - ) 
- connection_type = models.CharField( - choices=ConnectionType.choices, - blank=True, - db_comment="Connection type (Filesystem, Database, API or Manualreview)", - ) - configuration = models.JSONField( - blank=True, null=True, db_comment="Configuration in JSON format" - ) - connector_instance = models.ForeignKey( - ConnectorInstance, - on_delete=models.CASCADE, - db_index=True, - null=True, - db_comment="Foreign key from ConnectorInstance model", - ) - - class Meta: - db_table = "workflow_endpoints" - verbose_name = "Workflow Endpoint" - verbose_name_plural = "Workflow Endpoints" diff --git a/backend/workflow_manager/endpoint/queue_utils.py b/backend/workflow_manager/endpoint/queue_utils.py deleted file mode 100644 index 5311fadbf..000000000 --- a/backend/workflow_manager/endpoint/queue_utils.py +++ /dev/null @@ -1,51 +0,0 @@ -import logging -from dataclasses import dataclass -from enum import Enum -from typing import Any, Optional - -from utils.constants import Common -from workflow_manager.endpoint.exceptions import UnstractQueueException - -from unstract.connectors.queues import connectors as queue_connectors -from unstract.connectors.queues.unstract_queue import UnstractQueue - -logger = logging.getLogger(__name__) - - -class QueueResultStatus(Enum): - SUCCESS = "success" - FAILURE = "failure" - # Add other statuses as needed - - -class QueueUtils: - @staticmethod - def get_queue_inst(connector_settings: dict[str, Any] = {}) -> UnstractQueue: - if not queue_connectors: - raise UnstractQueueException(detail="Queue connector not exists") - queue_connector_key = next(iter(queue_connectors)) - connector = queue_connectors[queue_connector_key][Common.METADATA][ - Common.CONNECTOR - ] - connector_class: UnstractQueue = connector(connector_settings) - return connector_class - - -@dataclass -class QueueResult: - file: str - status: QueueResultStatus - result: Any - workflow_id: str - file_content: str - whisper_hash: Optional[str] = None - - def to_dict(self) -> Any: - return { - "file": self.file, - "whisper_hash": self.whisper_hash, - "status": self.status, - "result": self.result, - "workflow_id": self.workflow_id, - "file_content": self.file_content, - } diff --git a/backend/workflow_manager/endpoint/serializers.py b/backend/workflow_manager/endpoint/serializers.py deleted file mode 100644 index 30ce2f890..000000000 --- a/backend/workflow_manager/endpoint/serializers.py +++ /dev/null @@ -1,12 +0,0 @@ -import logging - -from rest_framework.serializers import ModelSerializer -from workflow_manager.endpoint.models import WorkflowEndpoint - -logger = logging.getLogger(__name__) - - -class WorkflowEndpointSerializer(ModelSerializer): - class Meta: - model = WorkflowEndpoint - fields = "__all__" diff --git a/backend/workflow_manager/endpoint/source.py b/backend/workflow_manager/endpoint/source.py deleted file mode 100644 index 0ba7df8e9..000000000 --- a/backend/workflow_manager/endpoint/source.py +++ /dev/null @@ -1,649 +0,0 @@ -import fnmatch -import logging -import os -import shutil -from hashlib import md5, sha256 -from io import BytesIO -from itertools import islice -from typing import Any, Optional - -import fsspec -from connector.models import ConnectorInstance -from connector_processor.constants import ConnectorKeys -from django.core.files.uploadedfile import UploadedFile -from django.db import connection -from unstract.workflow_execution.enums import LogState -from workflow_manager.endpoint.base_connector import BaseConnector -from workflow_manager.endpoint.constants import ( - 
FilePattern, - FileSystemConnector, - FileType, - SourceConstant, - SourceKey, - WorkflowFileType, -) -from workflow_manager.endpoint.dto import FileHash -from workflow_manager.endpoint.exceptions import ( - FileHashMismatched, - FileHashNotFound, - InvalidInputDirectory, - InvalidSourceConnectionType, - MissingSourceConnectionType, - OrganizationIdNotFound, - SourceConnectorNotConfigured, -) -from workflow_manager.endpoint.models import WorkflowEndpoint -from workflow_manager.workflow.execution import WorkflowExecutionServiceHelper -from workflow_manager.workflow.file_history_helper import FileHistoryHelper -from workflow_manager.workflow.models.workflow import Workflow - -logger = logging.getLogger(__name__) - - -# TODO: Inherit from SourceConnector for different sources - File, API .etc. -class SourceConnector(BaseConnector): - """A class representing a source connector for a workflow. - - This class extends the BaseConnector class and provides methods for - interacting with different types of source connectors, - such as file system connectors and API connectors. - It allows listing files from the source connector, - adding files to the execution volume, and retrieving JSON schemas for - different types of connectors. - - Attributes: - workflow (Workflow): The workflow associated with the source connector. - """ - - def __init__( - self, - workflow: Workflow, - execution_id: str, - organization_id: Optional[str] = None, - execution_service: Optional[WorkflowExecutionServiceHelper] = None, - ) -> None: - """Create a SourceConnector. - - Args: - workflow (Workflow): Associated workflow instance - execution_id (str): UUID of the current execution - organization_id (Optional[str]): Organization ID. Defaults to None. - execution_service (Optional[WorkflowExecutionServiceHelper]): Instance of - WorkflowExecutionServiceHelper that helps with WF execution. - Defaults to None. This is not used in case of execution by API. - - Raises: - OrganizationIdNotFound: _description_ - """ - organization_id = organization_id or connection.tenant.schema_name - if not organization_id: - raise OrganizationIdNotFound() - super().__init__(workflow.id, execution_id, organization_id) - self.endpoint = self._get_endpoint_for_workflow(workflow=workflow) - self.workflow = workflow - self.execution_id = execution_id - self.organization_id = organization_id - self.hash_value_of_file_content: Optional[str] = None - self.execution_service = execution_service - - def _get_endpoint_for_workflow( - self, - workflow: Workflow, - ) -> WorkflowEndpoint: - """Get WorkflowEndpoint instance. 
- - Args: - workflow (Workflow): Workflow - - Returns: - WorkflowEndpoint: _description_ - """ - endpoint: WorkflowEndpoint = WorkflowEndpoint.objects.get( - workflow=workflow, - endpoint_type=WorkflowEndpoint.EndpointType.SOURCE, - ) - if endpoint.connector_instance: - endpoint.connector_instance.connector_metadata = ( - endpoint.connector_instance.metadata - ) - return endpoint - - def validate(self) -> None: - connection_type = self.endpoint.connection_type - connector: ConnectorInstance = self.endpoint.connector_instance - if connection_type is None: - raise MissingSourceConnectionType() - if connection_type not in WorkflowEndpoint.ConnectionType.values: - raise InvalidSourceConnectionType() - if connection_type != WorkflowEndpoint.ConnectionType.API and connector is None: - raise SourceConnectorNotConfigured() - - def valid_file_patterns(self, required_patterns: list[Any]) -> list[str]: - patterns = { - FileType.PDF_DOCUMENTS: FilePattern.PDF_DOCUMENTS, - FileType.TEXT_DOCUMENTS: FilePattern.TEXT_DOCUMENTS, - FileType.IMAGES: FilePattern.IMAGES, - } - wildcard = [] - if not required_patterns: - wildcard.append("*") - else: - for pattern in required_patterns: - wildcard.extend(patterns.get(pattern, [])) - return wildcard - - def list_file_from_api_storage( - self, file_hashes: dict[str, FileHash] - ) -> tuple[dict[str, FileHash], int]: - """List all files from the api_storage_dir directory.""" - return file_hashes, len(file_hashes) - - def list_files_from_file_connector(self) -> tuple[dict[str, FileHash], int]: - """_summary_ - - Raises: - InvalidDirectory: _description_ - - Returns: - tuple[dict[str, FileHash], int]: A dictionary of matched file paths - and their corresponding FileHash objects, along with the total count - of matched files. - """ - connector: ConnectorInstance = self.endpoint.connector_instance - connector_settings: dict[str, Any] = connector.connector_metadata - source_configurations: dict[str, Any] = self.endpoint.configuration - required_patterns = list( - source_configurations.get(SourceKey.FILE_EXTENSIONS, []) - ) - recursive = bool( - source_configurations.get(SourceKey.PROCESS_SUB_DIRECTORIES, False) - ) - limit = int( - source_configurations.get( - SourceKey.MAX_FILES, FileSystemConnector.MAX_FILES - ) - ) - root_dir_path = connector_settings.get(ConnectorKeys.PATH, "") - folders_to_process = list(source_configurations.get(SourceKey.FOLDERS, ["/"])) - # Process from root in case its user provided list is empty - if not folders_to_process: - folders_to_process = ["/"] - patterns = self.valid_file_patterns(required_patterns=required_patterns) - self.publish_user_sys_log( - f"Matching for patterns '{', '.join(patterns)}' from " - f"'{', '.join(folders_to_process)}'" - ) - - source_fs = self.get_fs_connector( - settings=connector_settings, connector_id=connector.connector_id - ) - source_fs_fsspec = source_fs.get_fsspec_fs() - # Checking if folders exist at source before processing - # TODO: Validate while receiving this input configuration as well - for input_directory in folders_to_process: - # TODO: Move to connector class for better error handling - try: - input_directory = source_fs.get_connector_root_dir( - input_dir=input_directory, root_path=root_dir_path - ) - if not source_fs_fsspec.isdir(input_directory): - raise InvalidInputDirectory(dir=input_directory) - except Exception as e: - msg = f"Error while validating path '{input_directory}'. 
{str(e)}" - self.publish_user_sys_log(msg) - if isinstance(e, InvalidInputDirectory): - raise - raise InvalidInputDirectory(detail=msg) - - total_files_to_process = 0 - total_matched_files = {} - - for input_directory in folders_to_process: - input_directory = source_fs.get_connector_root_dir( - input_dir=input_directory, root_path=root_dir_path - ) - logger.debug(f"Listing files from: {input_directory}") - matched_files, count = self._get_matched_files( - source_fs_fsspec, input_directory, patterns, recursive, limit - ) - self.publish_user_sys_log( - f"Matched '{count}' files from '{input_directory}'" - ) - total_matched_files.update(matched_files) - total_files_to_process += count - self.publish_input_output_list_file_logs( - folders_to_process, total_matched_files, total_files_to_process - ) - return total_matched_files, total_files_to_process - - def publish_user_sys_log(self, msg: str) -> None: - """Publishes log to the user and system. - - Pushes logs messages to the configured logger and to the - websocket channel if the `execution_service` is configured. - - Args: - msg (str): Message to log - """ - logger.info(msg) - if self.execution_service: - self.execution_service.publish_log(msg) - - def publish_input_output_list_file_logs( - self, folders: list[str], matched_files: dict[str, FileHash], count: int - ) -> None: - if not self.execution_service: - return None - - folders_list = "\n".join(f"- `{folder.strip()}`" for folder in folders) - input_log = f"##Folders to process:\n\n{folders_list}\n\n" - self.execution_service.publish_update_log( - state=LogState.INPUT_UPDATE, message=input_log - ) - output_log = self._matched_files_component_log(matched_files, count) - self.execution_service.publish_update_log( - state=LogState.OUTPUT_UPDATE, message=output_log - ) - - def publish_input_file_content(self, input_file_path: str, input_text: str) -> None: - if not self.execution_service: - return None - output_log_message = f"##Input text:\n\n```text\n{input_text}\n```\n\n" - input_log_message = ( - "##Input file:\n\n```text\n" f"{os.path.basename(input_file_path)}\n```\n\n" - ) - self.execution_service.publish_update_log( - state=LogState.INPUT_UPDATE, message=input_log_message - ) - self.execution_service.publish_update_log( - state=LogState.OUTPUT_UPDATE, message=output_log_message - ) - - def _matched_files_component_log( - self, matched_files: dict[str, FileHash], count: int - ) -> str: - output_log = "### Matched files \n```text\n\n\n" - for file_path in islice(matched_files.keys(), 20): - output_log += f"- {file_path}\n" - output_log += "```\n\n" - output_log += f"""Total matched files: {count} - \n\nPlease note that only the first 20 files are shown.\n\n""" - return output_log - - def _get_matched_files( - self, - source_fs: Any, - input_directory: str, - patterns: list[str], - recursive: bool, - limit: int, - ) -> tuple[dict[str, FileHash], int]: - """Get a dictionary of matched files based on patterns in a directory. - - This method searches for files in the specified `input_directory` that - match any of the given `patterns`. The search can be performed recursively - if `recursive` is set to True. The number of matched files returned is - limited by `limit`. - - Args: - source_fs (Any): The file system object used for searching. - input_directory (str): The directory to search for files. - patterns (list[str]): The patterns to match against file names. - recursive (bool): Whether to perform a recursive search. - limit (int): The maximum number of matched files to return. 
- - Returns: - tuple[dict[str, FileHash], int]: A dictionary of matched file paths - and their corresponding FileHash objects, along with the total count - of matched files. - """ - matched_files: dict[str, FileHash] = {} - count = 0 - max_depth = int(SourceConstant.MAX_RECURSIVE_DEPTH) if recursive else 1 - - for root, dirs, files in source_fs.walk(input_directory, maxdepth=max_depth): - for file in files: - if count >= limit: - break - if self._should_process_file(file, patterns): - file_path = str(os.path.join(root, file)) - if self._is_new_file( - file_path=file_path, workflow=self.endpoint.workflow - ): - matched_files[file_path] = self._create_file_hash(file_path) - count += 1 - - return matched_files, count - - def _should_process_file(self, file: str, patterns: list[str]) -> bool: - """Check if the file should be processed based on the patterns.""" - return bool(file) and any( - fnmatch.fnmatchcase(file.lower(), pattern.lower()) for pattern in patterns - ) - - def _is_new_file(self, file_path: str, workflow: Workflow) -> bool: - """Check if the file is new or already processed.""" - file_content = self.get_file_content(input_file_path=file_path) - file_hash = self.get_hash_value(file_content) - file_history = FileHistoryHelper.get_file_history( - workflow=workflow, cache_key=file_hash - ) - - # In case of ETL pipelines, its necessary to skip files which have - # already been processed - if ( - self.execution_service.use_file_history - and file_history - and file_history.is_completed() - ): - self.execution_service.publish_log( - f"Skipping file {file_path} as it has already been processed. " - "Clear the file markers to process it again." - ) - return False - - return True - - def _create_file_hash(self, file_path: str) -> FileHash: - """Create a FileHash object for the matched file.""" - file_name = os.path.basename(file_path) - file_content = self.get_file_content(input_file_path=file_path) - file_hash = self.get_hash_value(file_content) - connection_type = self.endpoint.connection_type - - return FileHash( - file_path=file_path, - source_connection_type=connection_type, - file_name=file_name, - file_hash=file_hash, - ) - - # TODO: Get file count from len of dict instead of returning tuple - def list_files_from_source( - self, file_hashes: dict[str, FileHash] = {} - ) -> tuple[dict[str, FileHash], int]: - """List files from source connector. - - Args: - api_storage_dir (Optional[str], optional): API storage directory - Returns: - tuple[dict[str, FileHash], int]: A dictionary of FileHashes, - along with the total count of matched files. - """ - connection_type = self.endpoint.connection_type - if connection_type == WorkflowEndpoint.ConnectionType.FILESYSTEM: - return self.list_files_from_file_connector() - elif connection_type == WorkflowEndpoint.ConnectionType.API: - return self.list_file_from_api_storage(file_hashes) - raise InvalidSourceConnectionType() - - @classmethod - def hash_str(cls, string_to_hash: Any, hash_method: str = "sha256") -> str: - """Computes the hash for a given input string. - - Useful to hash strings needed for caching and other purposes. 
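# Illustrative sketch (not part of the deleted module): source file selection
# is wildcard-based and case-insensitive. Configured file types expand to
# fnmatch patterns via valid_file_patterns() (the concrete FilePattern values
# live in constants.py and are not shown in this diff; "*.pdf" is assumed
# below), and both sides are lower-cased before matching.
import fnmatch

patterns = ["*.pdf"]  # assumed expansion of FileType.PDF_DOCUMENTS
for name in ["Invoice.PDF", "notes.txt"]:
    print(name, any(fnmatch.fnmatchcase(name.lower(), p.lower()) for p in patterns))
# Invoice.PDF True
# notes.txt False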
- Hash method defaults to "md5" - - Args: - string_to_hash (str): String to be hashed - hash_method (str): Hash hash_method to use, supported ones - - "md5" - - Returns: - str: Hashed string - """ - if hash_method == "md5": - if isinstance(string_to_hash, bytes): - return str(md5(string_to_hash).hexdigest()) - return str(md5(string_to_hash.encode()).hexdigest()) - elif hash_method == "sha256": - if isinstance(string_to_hash, (bytes, bytearray)): - return str(sha256(string_to_hash).hexdigest()) - return str(sha256(string_to_hash.encode()).hexdigest()) - else: - raise ValueError(f"Unsupported hash_method: {hash_method}") - - def get_file_content(self, input_file_path: str, chunk_size: int = 8192) -> bytes: - """Read the content of a file from a remote filesystem in chunks. - - Args: - input_file_path (str): The path of the input file. - chunk_size (int): The size of the chunks to read at a time - (default is 8192 bytes). - - Returns: - bytes: The content of the file. - """ - connector: ConnectorInstance = self.endpoint.connector_instance - connector_settings: dict[str, Any] = connector.connector_metadata - source_fs = self.get_fsspec( - settings=connector_settings, connector_id=connector.connector_id - ) - file_content = bytearray() # Use bytearray for efficient byte concatenation - with source_fs.open(input_file_path, "rb") as remote_file: - while chunk := remote_file.read(chunk_size): - file_content.extend(chunk) - - return bytes(file_content) - - def get_hash_value(self, file_content: bytes) -> str: - """Generate a hash value from the file content. - - Args: - file_content (bytes): The content of the file. - - Returns: - str: The hash value of the file content. - """ - return self.hash_str(file_content) - - def copy_file_to_infile_dir(self, source_file_path: str, infile_path: str) -> None: - """Copy the source file to the infile directory. - - Args: - source_file_path (str): The path of the source file. - infile_path (str): The destination path in the infile directory. - """ - shutil.copyfile(source_file_path, infile_path) - logger.info(f"File copied from {source_file_path} to {infile_path}") - - def add_input_from_connector_to_volume(self, input_file_path: str) -> str: - """Add input file to execution directory. - - Args: - input_file_path (str): The path of the input file. - - Returns: - str: The hash value of the file content. - - Raises: - FileHashNotFound: If the hash value of the file content is not found. 
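# Illustrative sketch (not part of the deleted module): the dedup cache key for
# a source file is a sha256 hex digest of its full content, which
# get_file_content() streams in 8 KB chunks. A functionally equivalent
# local-filesystem stand-in, for a hypothetical path:
from hashlib import sha256

def hash_local_file(path: str, chunk_size: int = 8192) -> str:
    digest = sha256()
    with open(path, "rb") as f:
        while chunk := f.read(chunk_size):
            digest.update(chunk)
    return digest.hexdigest()

# hash_local_file("/tmp/sample.pdf")  -> key used to look up FileHistory entries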
- """ - source_file_path = os.path.join(self.execution_dir, WorkflowFileType.SOURCE) - infile_path = os.path.join(self.execution_dir, WorkflowFileType.INFILE) - source_file = f"file://{source_file_path}" - - # Get file content and hash value - file_content = self.get_file_content(input_file_path) - hash_value_of_file_content = self.get_hash_value(file_content) - - logger.info( - f"hash_value_of_file {source_file} is : {hash_value_of_file_content}" - ) - - input_log = ( - file_content[:500].decode("utf-8", errors="replace") + "...(truncated)" - ) - self.publish_input_file_content(input_file_path, input_log) - - with fsspec.open(source_file, "wb") as local_file: - local_file.write(file_content) - - # Copy file to infile directory - self.copy_file_to_infile_dir(source_file_path, infile_path) - - logger.info(f"{input_file_path} is added to execution directory") - return hash_value_of_file_content - - def add_input_from_api_storage_to_volume(self, input_file_path: str) -> None: - """Add input file to execution directory from api storage.""" - infile_path = os.path.join(self.execution_dir, WorkflowFileType.INFILE) - source_path = os.path.join(self.execution_dir, WorkflowFileType.SOURCE) - shutil.copyfile(input_file_path, infile_path) - shutil.copyfile(input_file_path, source_path) - - def add_file_to_volume(self, input_file_path: str, file_hash: FileHash) -> str: - """Add input file to execution directory. - - Args: - input_file_path (str): source file - - Raises: - InvalidSource: _description_ - - Returns: - str: file_name - """ - connection_type = self.endpoint.connection_type - file_name = os.path.basename(input_file_path) - if connection_type == WorkflowEndpoint.ConnectionType.FILESYSTEM: - file_content_hash = self.add_input_from_connector_to_volume( - input_file_path=input_file_path, - ) - if file_content_hash != file_hash.file_hash: - raise FileHashMismatched() - elif connection_type == WorkflowEndpoint.ConnectionType.API: - self.add_input_from_api_storage_to_volume(input_file_path=input_file_path) - if file_name != file_hash.file_name: - raise FileHashNotFound() - file_content_hash = file_hash.file_hash - else: - raise InvalidSourceConnectionType() - - self.add_metadata_to_volume( - input_file_path=input_file_path, source_hash=file_content_hash - ) - return file_name - - def handle_final_result( - self, - results: list[dict[str, Any]], - file_name: str, - result: Optional[str], - ) -> None: - connection_type = self.endpoint.connection_type - if connection_type == WorkflowEndpoint.ConnectionType.API: - results.append({"file": file_name, "result": result}) - - def load_file(self, input_file_path: str) -> tuple[str, BytesIO]: - """Load file contnt and file name based on the file path. 
- - Args: - input_file_path (str): source file - - Raises: - InvalidSource: _description_ - - Returns: - tuple[str, BytesIO]: file_name , file content - """ - connector: ConnectorInstance = self.endpoint.connector_instance - connector_settings: dict[str, Any] = connector.connector_metadata - source_fs: fsspec.AbstractFileSystem = self.get_fsspec( - settings=connector_settings, connector_id=connector.connector_id - ) - with source_fs.open(input_file_path, "rb") as remote_file: - file_content = remote_file.read() - file_stream = BytesIO(file_content) - - return os.path.basename(input_file_path), file_stream - - @classmethod - def add_input_file_to_api_storage( - cls, - workflow_id: str, - execution_id: str, - file_objs: list[UploadedFile], - use_file_history: bool = False, - ) -> dict[str, FileHash]: - """Add input file to api storage. - - Args: - workflow_id (str): UUID of the worklfow - execution_id (str): UUID of the execution - file_objs (list[UploadedFile]): List of uploaded files - use_file_history (bool): Use FileHistory table to return results on already - processed files. Defaults to False - returns: - dict[str, FileHash]: Dict containing file name and its corresponding hash - """ - api_storage_dir = cls.get_api_storage_dir_path( - workflow_id=workflow_id, execution_id=execution_id - ) - workflow: Workflow = Workflow.objects.get(id=workflow_id) - file_hashes: dict[str, FileHash] = {} - for file in file_objs: - file_name = file.name - destination_path = os.path.join(api_storage_dir, file_name) - os.makedirs(os.path.dirname(destination_path), exist_ok=True) - with open(destination_path, "wb") as f: - buffer = bytearray() - for chunk in file.chunks(): - buffer.extend(chunk) - f.write(buffer) - file_hash = cls.hash_str(buffer) - connection_type = WorkflowEndpoint.ConnectionType.API - - file_history = None - if use_file_history: - file_history = FileHistoryHelper.get_file_history( - workflow=workflow, cache_key=file_hash - ) - is_executed = ( - True if file_history and file_history.is_completed() else False - ) - file_hash = FileHash( - file_path=destination_path, - source_connection_type=connection_type, - file_name=file_name, - file_hash=file_hash, - is_executed=is_executed, - ) - file_hashes.update({file_name: file_hash}) - return file_hashes - - @classmethod - def create_endpoint_for_workflow( - cls, - workflow: Workflow, - ) -> None: - """Creating WorkflowEndpoint entity.""" - endpoint = WorkflowEndpoint( - workflow=workflow, - endpoint_type=WorkflowEndpoint.EndpointType.SOURCE, - ) - endpoint.save() - - @classmethod - def get_json_schema_for_api(cls) -> dict[str, Any]: - """Json schema for api. - - Returns: - dict[str, Any]: _description_ - """ - schema_path = os.path.join( - os.path.dirname(__file__), "static", "src", "api.json" - ) - return cls.get_json_schema(file_path=schema_path) - - @classmethod - def get_json_schema_for_file_system(cls) -> dict[str, Any]: - """Json schema for Filesystem. 
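# Illustrative sketch (not part of the deleted module): shape of the mapping
# returned by add_input_file_to_api_storage(), keyed by uploaded file name.
# Values below are hypothetical; is_executed is True only when a completed
# FileHistory row already exists for the file's hash.
from workflow_manager.endpoint.dto import FileHash  # module removed by this diff

file_hashes = {
    "invoice.pdf": FileHash(
        file_path="/<api-storage-dir>/invoice.pdf",  # illustrative destination path
        source_connection_type="API",                # WorkflowEndpoint.ConnectionType.API
        file_name="invoice.pdf",
        file_hash="<sha256-of-content>",
        is_executed=False,
    ),
}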
- - Returns: - dict[str, Any]: _description_ - """ - schema_path = os.path.join( - os.path.dirname(__file__), "static", "src", "file.json" - ) - return cls.get_json_schema(file_path=schema_path) diff --git a/backend/workflow_manager/endpoint/static/dest/db.json b/backend/workflow_manager/endpoint/static/dest/db.json deleted file mode 100644 index bdbf99a16..000000000 --- a/backend/workflow_manager/endpoint/static/dest/db.json +++ /dev/null @@ -1,66 +0,0 @@ -{ - "title": "Workflow DB Destination", - "description": "Settings for DB Destination", - "type": "object", - "required": [ - "table", - "includeAgent", - "includeTimestamp", - "columnMode" - ], - "properties": { - "table": { - "type": "string", - "title": "Table", - "default": "", - "description": "Table to store the output. If your database supports schemas, use the format schema.table" - }, - "includeAgent": { - "type": "boolean", - "title": "Include 'created_by' column", - "default": false, - "description": "Include the 'created_by' in the output row" - }, - "agentName": { - "type": "string", - "title": "Agent Name", - "enum": [ - "Unstract/DBWriter" - ], - "default": "Unstract/DBWriter", - "description": "Name of the agent to use as the 'created_by' value" - }, - "includeTimestamp": { - "type": "boolean", - "title": "Include 'created_at' column", - "default": false, - "description": "Include the 'created_at' in the output row" - }, - "filePath": { - "type": "string", - "title": "File Path Column Name", - "default": "file_path", - "description": "Name of the column to store the absolute path to the file" - }, - "executionId": { - "type": "string", - "title": "Execution ID Column Name", - "default": "execution_id", - "description": "Name of the column that stores the execution ID which identifies each individual run of a workflow" - }, - "columnMode": { - "type": "string", - "title": "Select how you want to write the output", - "enum": [ - "Write JSON to a single column" - ], - "default": "Write JSON to a single column" - }, - "singleColumnName": { - "type": "string", - "title": "Single Column Name", - "default": "data", - "description": "Name of the column to write the JSON to" - } - } -} diff --git a/backend/workflow_manager/endpoint/static/dest/file.json b/backend/workflow_manager/endpoint/static/dest/file.json deleted file mode 100644 index 95111993f..000000000 --- a/backend/workflow_manager/endpoint/static/dest/file.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "title": "Workflow File Destination", - "description": "Settings for File Destination", - "type": "object", - "required": [ - "outputFolder" - ], - "properties": { - "outputFolder": { - "type": "string", - "title": "Output folder", - "default": "output", - "description": "Folder to store the output", - "minLength": 1, - "maxLength": 100, - "format": "file-path" - } - } -} diff --git a/backend/workflow_manager/endpoint/static/src/api.json b/backend/workflow_manager/endpoint/static/src/api.json deleted file mode 100644 index 8d4a9d022..000000000 --- a/backend/workflow_manager/endpoint/static/src/api.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "title": "Workflow API Source", - "description": "Settings for API Source", - "type": "object", - "required": [], - "properties": { - "fileExtensions": { - "type": "array", - "title": "File types to process", - "description": "Limit the file types to process. 
Leave it empty to process all files", - "items": { - "type": "string", - "enum": [ - "PDF documents", - "Text documents", - "Images" - ] - } - } - } -} diff --git a/backend/workflow_manager/endpoint/static/src/file.json b/backend/workflow_manager/endpoint/static/src/file.json deleted file mode 100644 index 23bf3dc00..000000000 --- a/backend/workflow_manager/endpoint/static/src/file.json +++ /dev/null @@ -1,43 +0,0 @@ -{ - "title": "Workflow File Source", - "description": "Settings for File Source", - "type": "object", - "required": [], - "properties": { - "folders": { - "type": "array", - "title": "Folders to process", - "description": "Folders in the connected filesystem to start processing files from. Leave it empty or pass '/' to specify the root folder. Separate inputs with ⏎ Enter.", - "items": { - "type": "string", - "title": "Folder Path" - }, - "uniqueItems": true - }, - "processSubDirectories": { - "type": "boolean", - "title": "Process sub-folders", - "default": true, - "description": "Process sub folders recursively" - }, - "fileExtensions": { - "type": "array", - "title": "File types to process", - "description": "Limit the file types to process. Leave it empty to process all files", - "items": { - "type": "string", - "enum": [ - "PDF documents", - "Text documents", - "Images" - ] - } - }, - "maxFiles": { - "type": "number", - "title": "Max files to process", - "default": 100, - "description": "The maximum number of files to process" - } - } -} diff --git a/backend/workflow_manager/endpoint/tests.py b/backend/workflow_manager/endpoint/tests.py deleted file mode 100644 index a39b155ac..000000000 --- a/backend/workflow_manager/endpoint/tests.py +++ /dev/null @@ -1 +0,0 @@ -# Create your tests here. diff --git a/backend/workflow_manager/endpoint/tests/__init__.py b/backend/workflow_manager/endpoint/tests/__init__.py deleted file mode 100644 index 6ec742384..000000000 --- a/backend/workflow_manager/endpoint/tests/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from backend.celery_service import app as celery_app # type: ignore - -__all__ = ["celery_app"] diff --git a/backend/workflow_manager/endpoint/tests/test_database_utils/__init__.py b/backend/workflow_manager/endpoint/tests/test_database_utils/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/backend/workflow_manager/endpoint/tests/test_database_utils/base_test_db.py b/backend/workflow_manager/endpoint/tests/test_database_utils/base_test_db.py deleted file mode 100644 index d83fc3a59..000000000 --- a/backend/workflow_manager/endpoint/tests/test_database_utils/base_test_db.py +++ /dev/null @@ -1,160 +0,0 @@ -import datetime -import json -import os -from typing import Any - -import pytest # type: ignore -from dotenv import load_dotenv - -from unstract.connectors.databases.bigquery import BigQuery -from unstract.connectors.databases.mariadb import MariaDB -from unstract.connectors.databases.mssql import MSSQL -from unstract.connectors.databases.mysql import MySQL -from unstract.connectors.databases.postgresql import PostgreSQL -from unstract.connectors.databases.redshift import Redshift -from unstract.connectors.databases.snowflake import SnowflakeDB -from unstract.connectors.databases.unstract_db import UnstractDB - -load_dotenv("test.env") - - -class BaseTestDB: - @pytest.fixture(autouse=True) - def base_setup(self) -> None: - self.postgres_creds = { - "user": os.getenv("DB_USER"), - "password": os.getenv("DB_PASSWORD"), - "host": os.getenv("DB_HOST"), - "port": os.getenv("DB_PORT"), - "database": 
os.getenv("DB_NAME"), - } - self.redshift_creds = { - "user": os.getenv("REDSHIFT_USER"), - "password": os.getenv("REDSHIFT_PASSWORD"), - "host": os.getenv("REDSHIFT_HOST"), - "port": os.getenv("REDSHIFT_PORT"), - "database": os.getenv("REDSHIFT_DB"), - } - self.snowflake_creds = { - "user": os.getenv("SNOWFLAKE_USER"), - "password": os.getenv("SNOWFLAKE_PASSWORD"), - "account": os.getenv("SNOWFLAKE_ACCOUNT"), - "role": os.getenv("SNOWFLAKE_ROLE"), - "database": os.getenv("SNOWFLAKE_DB"), - "schema": os.getenv("SNOWFLAKE_SCHEMA"), - "warehouse": os.getenv("SNOWFLAKE_WAREHOUSE"), - } - self.mssql_creds = { - "user": os.getenv("MSSQL_USER"), - "password": os.getenv("MSSQL_PASSWORD"), - "server": os.getenv("MSSQL_SERVER"), - "port": os.getenv("MSSQL_PORT"), - "database": os.getenv("MSSQL_DB"), - } - self.mysql_creds = { - "user": os.getenv("MYSQL_USER"), - "password": os.getenv("MYSQL_PASSWORD"), - "host": os.getenv("MYSQL_SERVER"), - "port": os.getenv("MYSQL_PORT"), - "database": os.getenv("MYSQL_DB"), - } - self.mariadb_creds = { - "user": os.getenv("MARIADB_USER"), - "password": os.getenv("MARIADB_PASSWORD"), - "host": os.getenv("MARIADB_SERVER"), - "port": os.getenv("MARIADB_PORT"), - "database": os.getenv("MARIADB_DB"), - } - self.database_entry = { - "created_by": "Unstract/DBWriter", - "created_at": datetime.datetime(2024, 5, 20, 7, 46, 57, 307998), - "data": '{"input_file": "simple.pdf", "result": "report"}', - } - valid_schema_name = "public" - invalid_schema_name = "public_1" - self.valid_postgres_creds = {**self.postgres_creds, "schema": valid_schema_name} - self.invalid_postgres_creds = { - **self.postgres_creds, - "schema": invalid_schema_name, - } - self.valid_redshift_creds = {**self.redshift_creds, "schema": valid_schema_name} - self.invalid_redshift_creds = { - **self.redshift_creds, - "schema": invalid_schema_name, - } - self.invalid_syntax_table_name = "invalid-syntax.name.test_output" - self.invalid_wrong_table_name = "database.schema.test_output" - self.valid_table_name = "test_output" - bigquery_json_str = os.getenv("BIGQUERY_CREDS", "{}") - self.bigquery_settings = json.loads(bigquery_json_str) - self.bigquery_settings["json_credentials"] = bigquery_json_str - self.valid_bigquery_table_name = "unstract.bigquery_test.bigquery_output" - self.invalid_snowflake_db = {**self.snowflake_creds, "database": "invalid"} - self.invalid_snowflake_schema = {**self.snowflake_creds, "schema": "invalid"} - self.invalid_snowflake_warehouse = { - **self.snowflake_creds, - "warehouse": "invalid", - } - - # Gets all valid db instances except - # Bigquery (table name needs to be writted separately for bigquery) - @pytest.fixture( - params=[ - ("valid_postgres_creds", PostgreSQL), - ("snowflake_creds", SnowflakeDB), - ("mssql_creds", MSSQL), - ("mysql_creds", MySQL), - ("mariadb_creds", MariaDB), - ("valid_redshift_creds", Redshift), - ] - ) - def valid_dbs_instance(self, request: Any) -> Any: - return self.get_db_instance(request=request) - - # Gets all valid db instances except: - # Bigquery (table name needs to be writted separately for bigquery) - # Redshift (can't process more than 64KB character type) - @pytest.fixture( - params=[ - ("valid_postgres_creds", PostgreSQL), - ("snowflake_creds", SnowflakeDB), - ("mssql_creds", MSSQL), - ("mysql_creds", MySQL), - ("mariadb_creds", MariaDB), - ] - ) - def valid_dbs_instance_to_handle_large_doc(self, request: Any) -> Any: - return self.get_db_instance(request=request) - - def get_db_instance(self, request: Any) -> UnstractDB: - creds_name, 
db_class = request.param - creds = getattr(self, creds_name) - if not creds: - pytest.fail(f"Unknown credentials: {creds_name}") - db_instance = db_class(settings=creds) - return db_instance - - # Gets all invalid-db instances for postgres, redshift: - @pytest.fixture( - params=[ - ("invalid_postgres_creds", PostgreSQL), - ("invalid_redshift_creds", Redshift), - ] - ) - def invalid_dbs_instance(self, request: Any) -> Any: - return self.get_db_instance(request=request) - - @pytest.fixture - def valid_bigquery_db_instance(self) -> Any: - return BigQuery(settings=self.bigquery_settings) - - # Gets all invalid-db instances for snowflake: - @pytest.fixture( - params=[ - ("invalid_snowflake_db", SnowflakeDB), - ("invalid_snowflake_schema", SnowflakeDB), - ("invalid_snowflake_warehouse", SnowflakeDB), - ] - ) - def invalid_snowflake_db_instance(self, request: Any) -> Any: - return self.get_db_instance(request=request) diff --git a/backend/workflow_manager/endpoint/tests/test_database_utils/static/large_doc.txt b/backend/workflow_manager/endpoint/tests/test_database_utils/static/large_doc.txt deleted file mode 100644 index 3a3b67a00..000000000 --- a/backend/workflow_manager/endpoint/tests/test_database_utils/static/large_doc.txt +++ /dev/null @@ -1 +0,0 @@ -"\n\n UNITED STATES \n SECURITIES AND EXCHANGE COMMISSION \n Washington, D.C. 20549 \n\n FORM 10-Q \n\n(Mark One) \n [X] X QUARTERLY REPORT PURSUANT TO SECTION 13 OR 15(d) OF THE SECURITIES EXCHANGE ACT OF 1934 \n For the quarterly period ended December 30, 2023 \n or \n [ ] TRANSITION REPORT PURSUANT TO SECTION 13 OR 15(d) OF THE SECURITIES EXCHANGE ACT OF 1934 \n For the transition period from to \n Commission File Number: 001-36743 \n\n Apple Inc. \n (Exact name of Registrant as specified in its charter) \n\n California 94-2404110 \n (State or other jurisdiction (I.R.S. Employer Identification No.) \n of incorporation or organization) \n\n One Apple Park Way \n Cupertino, California 95014 \n (Address of principal executive offices) (Zip Code) \n (408) 996-1010 \n (Registrant\'s telephone number, including area code) \n\n Securities registered pursuant to Section 12(b) of the Act \n\n Title of each class symbol(s) Trading Name of each exchange on which registered \n Common Stock, $0.00001 par value per share AAPL The Nasdaq Stock Market LLC \n 0.000% Notes due 2025 The Nasdaq Stock Market LLC \n 0.875% Notes due 2025 The Nasdaq Stock Market LLC \n 1.625% Notes due 2026 The Nasdaq Stock Market LLC \n 2.000% Notes due 2027 The Nasdaq Stock Market LLC \n 1.375% Notes due 2029 The Nasdaq Stock Market LLC \n 3.050% Notes due 2029 The Nasdaq Stock Market LLC \n 0.500% Notes due 2031 The Nasdaq Stock Market LLC \n 3.600% Notes due 2042 The Nasdaq Stock Market LLC \n\nIndicate by check mark whether the Registrant (1) has filed all reports required to be filed by Section 13 or 15(d) of the Securities Exchange Act \nof 1934 during the preceding 12 months (or for such shorter period that the Registrant was required to file such reports), and (2) has been \nsubject to such filing requirements for the past 90 days. \n Yes [X] No [ ] \n\nIndicate by check mark whether the Registrant has submitted electronically every Interactive Data File required to be submitted pursuant to Rule \n405 of Regulation S-T (§232.405 of this chapter) during the preceding 12 months (or for such shorter period that the Registrant was required to \nsubmit such files). 
\n Yes [X] No [ ] \n<<<\n\n\nIndicate by check mark whether the Registrant is a large accelerated filer, an accelerated filer, a non-accelerated filer, a smaller reporting \ncompany, or an emerging growth company. See the definitions of "large accelerated filer," "accelerated filer," "smaller reporting company," and \n"emerging growth company" in Rule 12b-2 of the Exchange Act. \n\n Large accelerated filer [X] Accelerated filer [ ] \n Non-accelerated filer [ ] Smaller reporting company [ ] \n Emerging growth company [ ] \n\nIf an emerging growth company, indicate by check mark if the Registrant has elected not to use the extended transition period for complying with \nany new or revised financial accounting standards provided pursuant to Section 13(a) of the Exchange Act. [ ] \n\nIndicate by check mark whether the Registrant is a shell company (as defined in Rule 12b-2 of the Exchange Act). \n Yes [ ] No [X] \n\n 15,441,881,000 shares of common stock were issued and outstanding as of January 19, 2024. \n<<<\n\n\n Apple Inc. \n\n Form 10-Q \n\n For the Fiscal Quarter Ended December 30, 2023 \n TABLE OF CONTENTS \n\n Page \n Part I \nItem 1. Financial Statements 1 \nItem 2. Management\'s Discussion and Analysis of Financial Condition and Results of Operations 13 \nItem 3. Quantitative and Qualitative Disclosures About Market Risk 18 \nItem 4. Controls and Procedures 18 \n Part II \nItem 1. Legal Proceedings 19 \nItem 1A. Risk Factors 19 \nItem 2. Unregistered Sales of Equity Securities and Use of Proceeds 20 \nItem 3. Defaults Upon Senior Securities 21 \nItem 4. Mine Safety Disclosures 21 \nItem 5. Other Information 21 \nItem 6. Exhibits 21 \n<<<\n\n\nPARTI - FINANCIAL INFORMATION \n\nItem 1. Financial Statements \n\n Apple Inc. \n\n CONDENSED CONSOLIDATED STATEMENTS OF OPERATIONS (Unaudited) \n (In millions, except number of shares, which are reflected in thousands, and per-share amounts) \n\n Three Months Ended \n December 2023 30, December 2022 31, \n\n Net sales: \n Products $ 96,458 $ 96,388 \n Services 23,117 20,766 \n Total net sales 119,575 117,154 \n\n Cost of sales: \n Products 58,440 60,765 \n Services 6,280 6,057 \n Total cost of sales 64,720 66,822 \n Gross margin 54,855 50,332 \n\n Operating expenses: \n Research and development 7,696 7,709 \n Selling, general and administrative 6,786 6,607 \n Total operating expenses 14,482 14,316 \n\n Operating income 40,373 36,016 \n Other income/(expense), net (50) (393) \n Income before provision for income taxes 40,323 35,623 \n Provision for income taxes 6,407 5,625 \n Net income $ 33,916 $ 29,998 \n\n Earnings per share: \n Basic $ 2.19 $ 1.89 \n Diluted $ 2.18 $ 1.88 \n\n Shares used in computing earnings per share: \n Basic 15,509,763 15,892,723 \n Diluted 15,576,641 15,955,718 \n\n See accompanying Notes to Condensed Consolidated Financial Statements. \n\n Apple Inc. IQ1 2024 Form 10-Q | 1 \n<<<\n\n\n Apple Inc. 
\n\n CONDENSED CONSOLIDATED STATEMENTS OF COMPREHENSIVE INCOME (Unaudited) \n (In millions) \n\n Three Months Ended \n December 2023 30, December 2022 31, \n\nNet income $ 33,916 $ 29,998 \nOther comprehensive income/(loss): \n Change in foreign currency translation, net of tax 308 (14) \n\n Change in unrealized gains/losses on derivative instruments, net of tax: \n Change in fair value of derivative instruments (531) (988) \n Adjustment for net (gains)/losses realized and included in net income (823) (1,766) \n Total change in unrealized gains/losses on derivative instruments (1,354) (2,754) \n\n Change in unrealized gains/losses on marketable debt securities, net of tax: \n Change in fair value of marketable debt securities 3,045 900 \n Adjustment for net (gains)/losses realized and included in net income 75 65 \n Total change in unrealized gains/losses on marketable debt securities 3,120 965 \n\nTotal other comprehensive income/(loss) 2,074 (1,803) \nTotal comprehensive income $ 35,990 $ 28,195 \n\n See accompanying Notes to Condensed Consolidated Financial Statements. \n\n Apple Inc. I Q1 2024 Form 10-Q 12 \n<<<\n\n\n Apple Inc. \n\n CONDENSED CONSOLIDATED BALANCE SHEETS (Unaudited) \n (In millions, except number of shares, which are reflected in thousands, and par value) \n\n December 2023 30, September 2023 30, \n\n ASSETS: \nCurrent assets: \n Cash and cash equivalents $ 40,760 $ 29,965 \n Marketable securities 32,340 31,590 \n Accounts receivable, net 23,194 29,508 \n Vendor non-trade receivables 26,908 31,477 \n Inventories 6,511 6,331 \n Other current assets 13,979 14,695 \n Total current assets 143,692 143,566 \n\nNon-current assets: \n Marketable securities 99,475 100,544 \n Property, plant and equipment, net 43,666 43,715 \n Other non-current assets 66,681 64,758 \n Total non-current assets 209,822 209,017 \n Total assets $ 353,514 $ 352,583 \n\n LIABILITIES AND SHAREHOLDERS\' EQUITY: \nCurrent liabilities: \n Accounts payable $ 58,146 $ 62,611 \n Other current liabilities 54,611 58,829 \n Deferred revenue 8,264 8,061 \n Commercial paper 1,998 5,985 \n Term debt 10,954 9,822 \n Total current liabilities 133,973 145,308 \n\nNon-current liabilities : \n Term debt 95,088 95,281 \n Other non-current liabilities 50,353 49,848 \n Total non-current liabilities 145,441 145,129 \n Total liabilities 279,414 290,437 \n\nCommitments and contingencies \n\nShareholders\' equity: \n Common stock and additional paid-in capital, $0.00001 par value: 50,400,000 shares \n authorized; 15,460,223 and 15,550,061 shares issued and outstanding, respectively 75,236 73,812 \n Retained earnings/(Accumulated deficit) 8,242 (214) \n Accumulated other comprehensive loss (9,378) (11,452) \n Total shareholders\' equity 74,100 62,146 \n Total liabilities and shareholders\' equity $ 353,514 $ 352,583 \n\n See accompanying Notes to Condensed Consolidated Financial Statements. \n\n Apple Inc. IQ1 2024 Form 10-Q 13 \n<<<\n\n\n Apple Inc. 
\n\n CONDENSED CONSOLIDATED STATEMENTS OF SHAREHOLDERS\' EQUITY (Unaudited) \n (In millions, except per-share amounts) \n\n Three Months Ended \n December 2023 30, December 2022 31, \n\nTotal shareholders\' equity, beginning balances $ 62,146 $ 50,672 \n\nCommon stock and additional paid-in capital: \n Beginning balances 73,812 64,849 \n Common stock withheld related to net share settlement of equity awards (1,660) (1,434) \n Share-based compensation 3,084 2,984 \n Ending balances 75,236 66,399 \n\nRetained earnings/(Accumulated deficit): \n Beginning balances (214) (3,068) \n Net income 33,916 29,998 \n Dividends and dividend equivalents declared (3,774) (3,712) \n Common stock withheld related to net share settlement of equity awards (1,018) (978) \n Common stock repurchased (20,668) (19,000) \n Ending balances 8,242 3,240 \n\nAccumulated other comprehensive income/(loss): \n Beginning balances (11,452) (11,109) \n Other comprehensive income/(loss) 2,074 (1,803) \n Ending balances (9,378) (12,912) \n\nTotal shareholders\' equity, ending balances $ 74,100 $ 56,727 \n\nDividends and dividend equivalents declared per share or RSU $ 0.24 $ 0.23 \n\n See accompanying Notes to Condensed Consolidated Financial Statements. \n\n Apple Inc. 2024 Form 10-Q 14 \n<<<\n\n\n Apple Inc. \n\n CONDENSED CONSOLIDATED STATEMENTS OF CASH FLOWS (Unaudited) \n (In millions) \n\n Three Months Ended \n December 2023 30, December 2022 31, \n\nCash, cash equivalents and restricted cash, beginning balances $ 30,737 $ 24,977 \n\nOperating activities: \n Net income 33,916 29,998 \n Adjustments to reconcile net income to cash generated by operating activities: \n Depreciation and amortization 2,848 2,916 \n Share-based compensation expense 2,997 2,905 \n Other (989) (317) \n Changes in operating assets and liabilities: \n Accounts receivable, net 6,555 4,275 \n Vendor non-trade receivables 4,569 2,320 \n Inventories (137) (1,807) \n Other current and non-current assets (1,457) (4,099) \n Accounts payable (4,542) (6,075) \n Other current and non-current liabilities (3,865) 3,889 \n Cash generated by operating activities 39,895 34,005 \n\nInvesting activities: \n Purchases of marketable securities (9,780) (5,153) \n Proceeds from maturities of marketable securities 13,046 7,127 \n Proceeds from sales of marketable securities 1,337 509 \n Payments for acquisition of property, plant and equipment (2,392) (3,787) \n Other (284) (141) \n Cash generated by/(used in) investing activities 1,927 (1,445) \n\nFinancing activities: \n Payments for taxes related to net share settlement of equity awards (2,591) (2,316) \n Payments for dividends and dividend equivalents (3,825) (3,768) \n Repurchases of common stock (20,139) (19,475) \n Repayments of term debt (1,401) \n Repayments of commercial paper, net (3,984) (8,214) \n Other (46) (389) \n Cash used in financing activities (30,585) (35,563) \n\nIncrease/(Decrease) in cash, cash equivalents and restricted cash 11,237 (3,003) \nCash, cash equivalents and restricted cash, ending balances $ 41,974 $ 21,974 \n\nSupplemental cash flow disclosure: \n Cash paid for income taxes, net $ 7,255 $ 828 \n\n See accompanying Notes to Condensed Consolidated Financial Statements. \n\n Apple Inc. IQ1 2024 Form 10-Q 1 5 \n<<<\n\n\n Apple Inc. \n\n Notes to Condensed Consolidated Financial Statements (Unaudited) \n\nNote 1 - Summary of Significant Accounting Policies \n\nBasis of Presentation and Preparation \nThe condensed consolidated financial statements include the accounts of Apple Inc. 
and its wholly owned subsidiaries \n(collectively "Apple" or the "Company"). In the opinion of the Company\'s management, the condensed consolidated financial \nstatements reflect all adjustments, which are normal and recurring in nature, necessary for fair financial statement presentation. \nThe preparation of these condensed consolidated financial statements and accompanying notes in conformity with U.S. generally \naccepted accounting principles ("GAAP") requires the use of management estimates. Certain prior period amounts in the \ncondensed consolidated financial statements and accompanying notes have been reclassified to conform to the current period\'s \npresentation. These condensed consolidated financial statements and accompanying notes should be read in conjunction with \nthe Company\'s annual consolidated financial statements and accompanying notes included in its Annual Report on Form 10-K \nfor the fiscal year ended September 30, 2023 (the "2023 Form 10-K"). \n\nThe Company\'s fiscal year is the 52- or 53-week period that ends on the last Saturday of September. An additional week is \nincluded in the first fiscal quarter every five or six years to realign the Company\'s fiscal quarters with calendar quarters, which \noccurred in the first fiscal quarter of 2023. The Company\'s fiscal years 2024 and 2023 span 52 and 53 weeks, respectively. \nUnless otherwise stated, references to particular years, quarters, months and periods refer to the Company\'s fiscal years ended \nin September and the associated quarters, months and periods of those fiscal years. \n\nNote 2 - Revenue \nNet sales disaggregated by significant products and services for the three months ended December 30, 2023 and December 31, \n2022 were as follows (in millions): \n Three Months Ended \n December 2023 30, December 2022 31, \n iPhone® $ 69,702 $ 65,775 \n Mac® 7,780 7,735 \n iPad® 7,023 9,396 \n Wearables, Home and Accessories 11,953 13,482 \n Services 23,117 20,766 \n Total net sales $ 119,575 $ 117,154 \n\nTotal net sales include $3.5 billion of revenue recognized in the three months ended December 30, 2023 that was included in \ndeferred revenue as of September 30, 2023 and $3.4 billion of revenue recognized in the three months ended December 31, \n2022 that was included in deferred revenue as of September 24, 2022. \n\nThe Company\'s proportion of net sales by disaggregated revenue source was generally consistent for each reportable segment \nin Note 10, "Segment Information and Geographic Data" for the three months ended December 30, 2023 and December 31, \n2022, except in Greater China, where iPhone revenue represented a moderately higher proportion of net sales. \n\nAs of December 30, 2023 and September 30, 2023, the Company had total deferred revenue of $12.5 billion and $12.1 billion, \nrespectively. As of December 30, 2023, the Company expects 66% of total deferred revenue to be realized in less than a year, \n26% within one-to-two years, 7% within two-to-three years and 1% in greater than three years. \n\n Apple Inc. 
I Q1 2024 Form 10-Q | 6 \n<<<\n\n\nNote 3 - Earnings Per Share \nThe following table shows the computation of basic and diluted earnings per share for the three months ended December 30, \n2023 and December 31, 2022 (net income in millions and shares in thousands): \n Three Months Ended \n December 2023 30, December 2022 31, \n\nNumerator: \n Net income $ 33,916 $ 29,998 \n\nDenominator: \n Weighted-average basic shares outstanding 15,509,763 15,892,723 \n Effect of dilutive share-based awards 66,878 62,995 \n Weighted-average diluted shares 15,576,641 15,955,718 \n\nBasic earnings per share $ 2.19 $ 1.89 \nDiluted earnings per share $ 2.18 $ 1.88 \n\nApproximately 89 million restricted stock units ("RSUs") were excluded from the computation of diluted earnings per share for the \nthree months ended December 31, 2022 because their effect would have been antidilutive. \n\nNote 4 - Financial Instruments \n\nCash, Cash Equivalents and Marketable Securities \nThe following tables show the Company\'s cash, cash equivalents and marketable securities by significant investment category \nas of December 30, 2023 and September 30, 2023 (in millions): \n December 30, 2023 \n Cash and Current Non-Current \n Adjusted Cost Unrealized Unrealized Fair Cash Marketable Marketable \n Gains Losses Value Equivalents Securities Securities \n Cash $ 29,542 $ $ - $ 29,542 $ 29,542 $ $ \n\nLevel 1: \n Money market funds 2,000 2,000 2,000 \n Mutual funds 448 35 (11) 472 472 \n Subtotal 2,448 35 (11) 2,472 2,000 472 \n\nLevel 2 (1): \n U.S. Treasury securities 24,041 12 (920) 23,133 7,303 4,858 10,972 \n U.S. agency securities 5,791 (448) 5,343 243 98 5,002 \n Non-U.S. government securities 17,326 54 (675) 16,705 11,175 5,530 \n Certificates of deposit and time deposits 1,448 - - 1,448 1,119 329 \n Commercial paper 1,361 1,361 472 889 \n Corporate debt securities 75,360 112 (3,964) 71,508 81 13,909 57,518 \n Municipal securities 562 (14) 548 185 363 \n Mortgage- and asset-backed securities 22,369 53 (1,907) 20,515 425 20,090 \n Subtotal 148,258 231 (7,928) 140,561 9,218 31,868 99,475 \n Total (2) $ 180,248 $ 266 $ (7,939) $ 172,575 $ 40,760 $ 32,340 $ 99,475 \n\n Apple Inc. IQ1 2024 Form 10-Q 1 7 \n<<<\n\n\n September 30, 2023 \n Cash and Current Non-Current \n Adjusted Cost Unrealized Gains Unrealized Value Fair Cash Marketable Marketable Securities \n Losses Equivalents Securities \n Cash $ 28,359 $ $ $ 28,359 $ 28,359 $ $ \n Level 1: \n Money market funds 481 481 481 \n Mutual funds and equity securities 442 12 (26) 428 428 \n Subtotal 923 12 (26) 909 481 428 \n Level 2 (1): \n U.S. Treasury securities 19,406 (1,292) 18,114 35 5,468 12,611 \n U.S. agency securities 5,736 (600) 5,136 36 271 4,829 \n Non-U.S. government securities 17,533 6 (1,048) 16,491 11,332 5,159 \n Certificates of deposit and time deposits 1,354 - 1,354 1,034 320 \n Commercial paper 608 608 608 \n Corporate debt securities 76,840 6 (5,956) 70,890 20 12,627 58,243 \n Municipal securities 628 (26) 602 192 410 \n Mortgage- and asset-backed securities 22,365 6 (2,735) 19,636 344 19,292 \n Subtotal 144,470 18 (11,657) 132,831 1,125 31,162 100,544 \n Total (2) $ 173,752 $ 30 $ (11,683) $ 162,099 $ 29,965 $ 31,590 $ 100,544 \n\n (1) The valuation techniques used to measure the fair values of the Company\'s Level 2 financial instruments, which generally \n have counterparties with high credit ratings, are based on quoted market prices or model-driven valuations using significant \n inputs derived from or corroborated by observable market data. 
\n (2) As of December 30, 2023 and September 30, 2023, total marketable securities included $13.9 billion and $13.8 billion, \n respectively, that were restricted from general use, related to the European Commission decision finding that Ireland granted \n state aid to the Company, and other agreements. \n\nThe following table shows the fair value of the Company\'s non-current marketable debt securities, by contractual maturity, as of \nDecember 30, 2023 (in millions): \n\n Due after 1 year through 5 years $ 72,994 \nDue after 5 years through 10 years 9,368 \nDue after 10 years 17,113 \n Total fair value $ 99,475 \n\nDerivative Instruments and Hedging \nThe Company may use derivative instruments to partially offset its business exposure to foreign exchange and interest rate risk. \nHowever, the Company may choose not to hedge certain exposures for a variety of reasons, including accounting considerations \nor the prohibitive economic cost of hedging particular exposures. There can be no assurance the hedges will offset more than a \nportion of the financial impact resulting from movements in foreign exchange or interest rates. \n\nForeign Exchange Rate Risk \nTo protect gross margins from fluctuations in foreign exchange rates, the Company may use forwards, options or other \ninstruments, and may designate these instruments as cash flow hedges. The Company generally hedges portions of its \nforecasted foreign currency exposure associated with revenue and inventory purchases, typically for up to 12 months. \n\nTo protect the Company\'s foreign currency-denominated term debt or marketable securities from fluctuations in foreign \nexchange rates, the Company may use forwards, cross-currency swaps or other instruments. The Company designates these \ninstruments as either cash flow or fair value hedges. As of December 30, 2023, the maximum length of time over which the \nCompany is hedging its exposure to the variability in future cash flows for term debt-related foreign currency transactions is 19 \nyears. \n\nThe Company may also use derivative instruments that are not designated as accounting hedges to protect gross margins from \ncertain fluctuations in foreign exchange rates, as well as to offset a portion of the foreign currency gains and losses generated by \nthe remeasurement of certain assets and liabilities denominated in non-functional currencies. \n\n Apple Inc. IQ1 2024 Form 10-Q 18 \n<<<\n\n\nInterest Rate Risk \nTo protect the Company\'s term debt or marketable securities from fluctuations in interest rates, the Company may use interest \nrate swaps, options or other instruments. The Company designates these instruments as either cash flow or fair value hedges. 
\n\nThe notional amounts of the Company\'s outstanding derivative instruments as of December 30, 2023 and September 30, 2023 \nwere as follows (in millions): \n December 2023 30, September 2023 30, \n\n Derivative instruments designated as accounting hedges: \n Foreign exchange contracts $ 66,735 $ 74,730 \n Interest rate contracts $ 19,375 $ 19,375 \n\n Derivative instruments not designated as accounting hedges : \n Foreign exchange contracts $ 102,108 $ 104,777 \n\n The carrying amounts of the Company\'s hedged items in fair value hedges as of December 30, 2023 and September 30, 2023 \nwere as follows (in millions): \n December 2023 30, September 2023 30, \n\n Hedged assets/(liabilities): \n Current and non-current marketable securities $ 15,102 $ 14,433 \n Current and non-current term debt $ (18,661) $ (18,247) \n\nAccounts Receivable \n\n Trade Receivables \n The Company\'s third-party cellular network carriers accounted for 34% and 41% of total trade receivables as of December 30, \n2023 and September 30, 2023, respectively. The Company requires third-party credit support or collateral from certain \ncustomers to limit credit risk. \n\n Vendor Non-Trade Receivables \nThe Company has non-trade receivables from certain of its manufacturing vendors resulting from the sale of components to \nthese vendors who manufacture subassemblies or assemble final products for the Company. The Company purchases these \ncomponents directly from suppliers. The Company does not reflect the sale of these components in products net sales. Rather, \nthe Company recognizes any gain on these sales as a reduction of products cost of sales when the related final products are \nsold by the Company. As of December 30, 2023, the Company had two vendors that individually represented 10% or more of \ntotal vendor non-trade receivables, which accounted for 50% and 20%. As of September 30, 2023, the Company had two \nvendors that individually represented 10% or more of total vendor non-trade receivables, which accounted for 48% and 23%. \n\nNote 5 - Condensed Consolidated Financial Statement Details \nThe following table shows the Company\'s condensed consolidated financial statement details as of December 30, 2023 and \nSeptember 30, 2023 (in millions): \n\nProperty, Plant and Equipment, Net \n December 2023 30, September 2023 30, \n\n Gross property, plant and equipment $ 116,176 $ 114,599 \n Accumulated depreciation (72,510) (70,884) \n Total property, plant and equipment, net $ 43,666 $ 43,715 \n\n Apple Inc. IQ1 2024 Form 10-Q 19 \n<<<\n\n\nNote 6 - Debt \n\nCommercial Paper \nThe Company issues unsecured short-term promissory notes pursuant to a commercial paper program. The Company uses net \nproceeds from the commercial paper program for general corporate purposes, including dividends and share repurchases. As of \nDecember 30, 2023 and September 30, 2023, the Company had $2.0 billion and $6.0 billion of commercial paper outstanding, \nrespectively. 
The following table provides a summary of cash flows associated with the issuance and maturities of commercial \npaper for the three months ended December 30, 2023 and December 31, 2022 (in millions): \n Three Months Ended \n December 2023 30, December 2022 31, \n\n Maturities 90 days or less: \n Repayments of commercial paper, net $ (3,984) $ (5,569) \n\n Maturities greater than 90 days: \n Repayments of commercial paper - (2,645) \n\n Total repayments of commercial paper, net $ (3,984) $ (8,214) \n\nTerm Debt \nAs of December 30, 2023 and September 30, 2023, the Company had outstanding fixed-rate notes with varying maturities for an \naggregate carrying amount of $106.0 billion and $105.1 billion, respectively (collectively the "Notes"). As of December 30, 2023 \nand September 30, 2023, the fair value of the Company\'s Notes, based on Level 2 inputs, was $96.7 billion and $90.8 billion, \nrespectively. \n\nNote 7 - Shareholders\' Equity \n\nShare Repurchase Program \nDuring the three months ended December 30, 2023, the Company repurchased 118 million shares of its common stock for $20.5 \nbillion. The Company\'s share repurchase program does not obligate the Company to acquire a minimum amount of shares. \nUnder the program, shares may be repurchased in privately negotiated or open market transactions, including under plans \ncomplying with Rule 10b5-1 under the Securities Exchange Act of 1934, as amended (the "Exchange Act"). \n\nNote 8 - Share-Based Compensation \n\nRestricted Stock Units \nA summary of the Company\'s RSU activity and related information for the three months ended December 30, 2023 is as follows: \n Number RSUs of Weighted-Average Grant Date Fair Aggregate Fair Value \n (in thousands) Value Per RSU (in millions) \n Balance as of September 30, 2023 180,247 $ 135.91 \n RSUs granted 74,241 $ 171.58 \n RSUs vested (42,490) $ 110.75 \n RSUs canceled (3,026) $ 109.05 \n Balance as of December 30, 2023 208,972 $ 154.09 $ 40,233 \n\nThe fair value as of the respective vesting dates of RSUs was $7.7 billion and $6.8 billion for the three months ended December \n30, 2023 and December 31, 2022, respectively. \n\n Apple Inc. I Q1 2024 Form 10-Q | 10 \n<<<\n\n\nShare-Based Compensation \nThe following table shows share-based compensation expense and the related income tax benefit included in the Condensed \nConsolidated Statements of Operations for the three months ended December 30, 2023 and December 31, 2022 (in millions): \n Three Months Ended \n December 2023 30, December 2022 31, \n\nShare-based compensation expense $ 2,997 $ 2,905 \nIncome tax benefit related to share-based compensation expense $ (1,235) $ (1,178) \n\nAs of December 30, 2023, the total unrecognized compensation cost related to outstanding RSUs was $27.4 billion, which the \nCompany expects to recognize over a weighted-average period of 2.9 years. \n\nNote 9 - Contingencies \nThe Company is subject to various legal proceedings and claims that have arisen in the ordinary course of business and that \nhave not been fully resolved. The outcome of litigation is inherently uncertain. In the opinion of management, there was not at \nleast a reasonable possibility the Company may have incurred a material loss, or a material loss greater than a recorded accrual, \nconcerning loss contingencies for asserted legal and other claims. 
\n\nNote 10 - Segment Information and Geographic Data \nThe following table shows information by reportable segment for the three months ended December 30, 2023 and December 31, \n2022 (in millions): \n Three Months Ended \n December 2023 30, December 2022 31, \n\nAmericas: \n Net sales $ 50,430 $ 49,278 \n Operating income $ 20,357 $ 17,864 \n\n Europe: \n Net sales $ 30,397 $ 27,681 \n Operating income $ 12,711 $ 10,017 \n\nGreater China: \n Net sales $ 20,819 $ 23,905 \n Operating income $ 8,622 $ 10,437 \n\nJapan: \n Net sales $ 7,767 $ 6,755 \n Operating income $ 3,819 $ 3,236 \n\nRest of Asia Pacific: \n Net sales $ 10,162 $ 9,535 \n Operating income $ 4,579 $ 3,851 \n\n Apple Inc. I Q1 2024 Form 10-Q | 11 \n<<<\n\n\nA reconciliation of the Company\'s segment operating income to the Condensed Consolidated Statements of Operations for the \nthree months ended December 30, 2023 and December 31, 2022 is as follows (in millions): \n Three Months Ended \n December 2023 30, December 2022 31, \n\n Segment operating income $ 50,088 $ 45,405 \n Research and development expense (7,696) (7,709) \n Other corporate expenses, net (2,019) (1,680) \n Total operating income $ 40,373 $ 36,016 \n\n Apple Inc. I Q1 2024 Form 10-Q | 12 \n<<<\n\n\nItem 2. Management\'s Discussion and Analysis of Financial Condition and Results of Operations \n\nThis Item and other sections of this Quarterly Report on Form 10-Q ("Form 10-Q") contain forward-looking statements, within \nthe meaning of the Private Securities Litigation Reform Act of 1995, that involve risks and uncertainties. Forward-looking \nstatements provide current expectations of future events based on certain assumptions and include any statement that does \nnot directly relate to any historical or current fact. For example, statements in this Form 10-Q regarding the potential future \nimpact of macroeconomic conditions on the Company\'s business and results of operations are forward-looking statements. \nForward-looking statements can also be identified by words such as "future," "anticipates," "believes," "estimates," "expects," \n"intends," "plans," "predicts," "will," "would," "could," "can," "may," and similar terms. Forward-looking statements are not \nguarantees of future performance and the Company\'s actual results may differ significantly from the results discussed in the \nforward-looking statements. Factors that might cause such differences include, but are not limited to, those discussed in Part I, \nItem 1A of the 2023 Form 10-K under the heading "Risk Factors." The Company assumes no obligation to revise or update any \nforward-looking statements for any reason, except as required by law. \n\nUnless otherwise stated, all information presented herein is based on the Company\'s fiscal calendar, and references to \nparticular years, quarters, months or periods refer to the Company\'s fiscal years ended in September and the associated \nquarters, months and periods of those fiscal years. \n\nThe following discussion should be read in conjunction with the 2023 Form 10-K filed with the U.S. Securities and Exchange \nCommission (the "SEC") and the condensed consolidated financial statements and accompanying notes included in Part I, \nItem 1 of this Form 10-Q. \n\nAvailable Information \nThe Company periodically provides certain information for investors on its corporate website, www.apple.com, and its investor \nrelations website, investor.apple.com. 
This includes press releases and other information about financial performance, \ninformation on environmental, social and governance matters, and details related to the Company\'s annual meeting of \nshareholders. The information contained on the websites referenced in this Form 10-Q is not incorporated by reference into this \nfiling. Further, the Company\'s references to website URLs are intended to be inactive textual references only. \n\nBusiness Seasonality and Product Introductions \nThe Company has historically experienced higher net sales in its first quarter compared to other quarters in its fiscal year due in \npart to seasonal holiday demand. Additionally, new product and service introductions can significantly impact net sales, cost of \nsales and operating expenses. The timing of product introductions can also impact the Company\'s net sales to its indirect \ndistribution channels as these channels are filled with new inventory following a product launch, and channel inventory of an \nolder product often declines as the launch of a newer product approaches. Net sales can also be affected when consumers and \ndistributors anticipate a product introduction. \n\nFiscal Period \nThe Company\'s fiscal year is the 52- or 53-week period that ends on the last Saturday of September. An additional week is \nincluded in the first fiscal quarter every five or six years to realign the Company\'s fiscal quarters with calendar quarters, which \noccurred in the first quarter of 2023. The Company\'s fiscal years 2024 and 2023 span 52 and 53 weeks, respectively. \n\nQuarterly Highlights \nThe Company\'s first quarter of 2024 included 13 weeks, compared to 14 weeks during the first quarter of 2023. \n\nThe Company\'s total net sales increased 2% or $2.4 billion during the first quarter of 2024 compared to the same quarter in \n2023, driven primarily by higher net sales of iPhone and Services, partially offset by lower net sales of iPad and Wearables, \nHome and Accessories. \n\nDuring the first quarter of 2024, the Company announced an updated MacBook Pro® 14-in., MacBook Pro 16-in. and iMac®. \n\nThe Company repurchased $20.5 billion of its common stock and paid dividends and dividend equivalents of $3.8 billion during \nthe first quarter of 2024. \n\nMacroeconomic Conditions \nMacroeconomic conditions, including inflation, changes in interest rates, and currency fluctuations, have directly and indirectly \nimpacted, and could in the future materially impact, the Company\'s results of operations and financial condition. \n\n Apple Inc. I Q1 2024 Form 10-Q | 13 \n<<<\n\n\nSegment Operating Performance \nThe following table shows net sales by reportable segment for the three months ended December 30, 2023 and December 31, \n2022 (dollars in millions): \n Three Months Ended \n December 2023 30, December 2022 31, Change \n\n Net sales by reportable segment: \n Americas $ 50,430 $ 49,278 2 % \n Europe 30,397 27,681 10 % \n Greater China 20,819 23,905 (13)% \n Japan 7,767 6,755 15 % \n Rest of Asia Pacific 10,162 9,535 7 % \n Total net sales $ 119,575 $ 117,154 2 % \n\nAmericas \nAmericas net sales increased 2% or $1.2 billion during the first quarter of 2024 compared to the same quarter in 2023 due \nprimarily to higher net sales of Services and iPhone, partially offset by lower net sales of iPad. The strength in foreign currencies \nrelative to the U.S. dollar had a net favorable year-over-year impact on Americas net sales during the first quarter of 2024. 
\n\nEurope \nEurope net sales increased 10% or $2.7 billion during the first quarter of 2024 compared to the same quarter in 2023 due \nprimarily to higher net sales of iPhone. The strength in foreign currencies relative to the U.S. dollar had a net favorable year- \nover-year impact on Europe net sales during the first quarter of 2024. \n\nGreater China \nGreater China net sales decreased 13% or $3.1 billion during the first quarter of 2024 compared to the same quarter in 2023 due \nprimarily to lower net sales of iPhone, iPad and Wearables, Home and Accessories. The weakness in the renminbi relative to the \nU.S. dollar had an unfavorable year-over-year impact on Greater China net sales during the first quarter of 2024. \n\nJapan \nJapan net sales increased 15% or $1.0 billion during the first quarter of 2024 compared to the same quarter in 2023 due primarily \nto higher net sales of iPhone. The weakness in the yen relative to the U.S. dollar had an unfavorable year-over-year impact on \nJapan net sales during the first quarter of 2024. \n\nRest of Asia Pacific \nRest of Asia Pacific net sales increased 7% or $627 million during the first quarter of 2024 compared to the same quarter in 2023 \ndue primarily to higher net sales of iPhone, partially offset by lower net sales of Wearables, Home and Accessories. \n\n Apple Inc. I Q1 2024 Form 10-Q | 14 \n<<<\n\n\nProducts and Services Performance \nThe following table shows net sales by category for the three months ended December 30, 2023 and December 31, 2022 \n(dollars in millions): \n Three Months Ended \n December 2023 30, December 2022 31, Change \n\nNet sales by category: \n iPhone $ 69,702 $ 65,775 6 % \n Mac 7,780 7,735 1 % \n iPad 7,023 9,396 (25)% \n Wearables, Home and Accessories 11,953 13,482 (11)% \n Services 23,117 20,766 11 % \n Total net sales $ 119,575 $ 117,154 2 % \n\niPhone \niPhone net sales increased 6% or $3.9 billion during the first quarter of 2024 compared to the same quarter in 2023 due primarily \nto higher net sales of Pro models, partially offset by lower net sales of other models. \n\nMac \nMac net sales were relatively flat during the first quarter of 2024 compared to the same quarter in 2023. \n\niPad \niPad net sales decreased 25% or $2.4 billion during the first quarter of 2024 compared to the same quarter in 2023 due primarily \nto lower net sales of iPad Pro, iPad 9th generation and iPad Air. \n\nWearables, Home and Accessories \nWearables, Home and Accessories net sales decreased 11% or $1.5 billion during the first quarter of 2024 compared to the \nsame quarter in 2023 due primarily to lower net sales of Wearables and Accessories. \n\nServices \nServices net sales increased 11% or $2.4 billion during the first quarter of 2024 compared to the same quarter in 2023 due \nprimarily to higher net sales from advertising, video and cloud services. \n\n Apple Inc. 
I Q1 2024 Form 10-Q | 15 \n<<<\n\n\nGross Margin \nProducts and Services gross margin and gross margin percentage for the three months ended December 30, 2023 and \nDecember 31, 2022 were as follows (dollars in millions): \n Three Months Ended \n December 2023 30, December 2022 31, \n\nGross margin: \n Products $ 38,018 $ 35,623 \n Services 16,837 14,709 \n Total gross margin $ 54,855 $ 50,332 \n\n Gross margin percentage: \n Products 39.4% 37.0% \n Services 72.8% 70.8% \n Total gross margin percentage 45.9% 43.0% \n\nProducts Gross Margin \nProducts gross margin increased during the first quarter of 2024 compared to the same quarter in 2023 due primarily to cost \nsavings and a different Products mix, partially offset by the weakness in foreign currencies relative to the U.S. dollar and lower \nProducts volume. \n\nProducts gross margin percentage increased during the first quarter of 2024 compared to the same quarter in 2023 due primarily \nto cost savings and a different Products mix, partially offset by the weakness in foreign currencies relative to the U.S. dollar. \n\nServices Gross Margin \nServices gross margin increased during the first quarter of 2024 compared to the same quarter in 2023 due primarily to higher \nServices net sales and a different Services mix. \n\nServices gross margin percentage increased during the first quarter of 2024 compared to the same quarter in 2023 due primarily \nto a different Services mix. \n\nThe Company\'s future gross margins can be impacted by a variety of factors, as discussed in Part I, Item 1A of the 2023 Form \n10-K under the heading "Risk Factors." As a result, the Company believes, in general, gross margins will be subject to volatility \nand downward pressure. \n\n Apple Inc. I Q1 2024 Form 10-Q | 16 \n<<<\n\n\nOperating Expenses \nOperating expenses for the three months ended December 30, 2023 and December 31, 2022 were as follows (dollars in \nmillions): \n Three Months Ended \n December 2023 30, December 2022 31, \n\n Research and development $ 7,696 $ 7,709 \n Percentage of total net sales 6% 7% \n\n Selling, general and administrative $ 6,786 $ 6,607 \n Percentage of total net sales 6% 6% \n Total operating expenses $ 14,482 $ 14,316 \n Percentage of total net sales 12% 12% \n\nResearch and Development \nResearch and development ("R&D") expense was relatively flat during the first quarter of 2024 compared to the same quarter in \n2023. \n\nSelling, General and Administrative \nSelling, general and administrative expense increased 3% or $179 million during the first quarter of 2024 compared to the same \nquarter in 2023. \n\nProvision for Income Taxes \nProvision for income taxes, effective tax rate and statutory federal income tax rate for the three months ended December 30, \n2023 and December 31, 2022 were as follows (dollars in millions): \n Three Months Ended \n December 2023 30, December 2022 31, \n\n Provision for income taxes $ 6,407 $ 5,625 \n Effective tax rate 15.9% 15.8% \n Statutory federal income tax rate 21% 21% \n\nThe Company\'s effective tax rate for the first quarter of 2024 was lower than the statutory federal income tax rate due primarily to \na lower effective tax rate on foreign earnings, tax benefits from share-based compensation, and the impact of the U.S. federal \nR&D credit, partially offset by state income taxes. \n\nThe Company\'s effective tax rate for the first quarter of 2024 was relatively flat compared to the same quarter in 2023. 
\n\nLiquidity and Capital Resources \nThe Company believes its balances of cash, cash equivalents and unrestricted marketable securities, along with cash generated \nby ongoing operations and continued access to debt markets, will be sufficient to satisfy its cash requirements and capital return \nprogram over the next 12 months and beyond. \n\nThe Company\'s contractual cash requirements have not changed materially since the 2023 Form 10-K, except for manufacturing \npurchase obligations. \n\nManufacturing Purchase Obligations \nThe Company utilizes several outsourcing partners to manufacture subassemblies for the Company\'s products and to perform \nfinal assembly and testing of finished products. The Company also obtains individual components for its products from a wide \nvariety of individual suppliers. As of December 30, 2023, the Company had manufacturing purchase obligations of $38.0 billion, \nwith $37.9 billion payable within 12 months. \n\n Apple Inc. I Q1 2024 Form 10-Q | 17 \n<<<\n\n\nCapital Return Program \nIn addition to its contractual cash requirements, the Company has an authorized share repurchase program. The program does \nnot obligate the Company to acquire a minimum amount of shares. As of December 30, 2023, the Company\'s quarterly cash \ndividend was $0.24 per share. The Company intends to increase its dividend on an annual basis, subject to declaration by the \nBoard of Directors. \n\nRecent Accounting Pronouncements \n\nIncome Taxes \nIn December 2023, the Financial Accounting Standards Board (the "FASB") issued Accounting Standards Update ("ASU") No. \n2023-09, Income Taxes (Topic 740): Improvements to Income Tax Disclosures ("ASU 2023-09"), which will require the Company \nto disclose specified additional information in its income tax rate reconciliation and provide additional information for reconciling \nitems that meet a quantitative threshold. ASU 2023-09 will also require the Company to disaggregate its income taxes paid \ndisclosure by federal, state and foreign taxes, with further disaggregation required for significant individual jurisdictions. The \nCompany will adopt ASU 2023-09 in its fourth quarter of 2026. ASU 2023-09 allows for adoption using either a prospective or \nretrospective transition method. \n\nSegment Reporting \nIn November 2023, the FASB issued ASU No. 2023-07, Segment Reporting (Topic 280): Improvements to Reportable Segment \nDisclosures ("ASU 2023-07\'), which will require the Company to disclose segment expenses that are significant and regularly \nprovided to the Company\'s chief operating decision maker ("CODM"). In addition, ASU 2023-07 will require the Company to \ndisclose the title and position of its CODM and how the CODM uses segment profit or loss information in assessing segment \nperformance and deciding how to allocate resources. The Company will adopt ASU 2023-07 in its fourth quarter of 2025 using a \nretrospective transition method. \n\nCritical Accounting Estimates \nThe preparation of financial statements and related disclosures in conformity with GAAP and the Company\'s discussion and \nanalysis of its financial condition and operating results require the Company\'s management to make judgments, assumptions \nand estimates that affect the amounts reported. 
Note 1, "Summary of Significant Accounting Policies" of the Notes to Condensed \nConsolidated Financial Statements in Part I, Item 1 of this Form 10-Q and in the Notes to Consolidated Financial Statements in \nPart II, Item 8 of the 2023 Form 10-K describe the significant accounting policies and methods used in the preparation of the \nCompany\'s condensed consolidated financial statements. There have been no material changes to the Company\'s critical \naccounting estimates since the 2023 Form 10-K. \n\nItem 3. Quantitative and Qualitative Disclosures About Market Risk \n\nThere have been no material changes to the Company\'s market risk during the first three months of 2024. For a discussion of the \nCompany\'s exposure to market risk, refer to the Company\'s market risk disclosures set forth in Part II, Item 7A, "Quantitative and \nQualitative Disclosures About Market Risk" of the 2023 Form 10-K. \n\nItem 4. Controls and Procedures \n\nEvaluation of Disclosure Controls and Procedures \nBased on an evaluation under the supervision and with the participation of the Company\'s management, the Company\'s principal \nexecutive officer and principal financial officer have concluded that the Company\'s disclosure controls and procedures as defined \nin Rules 13a-15(e) and 15d-15(e) under the Exchange Act were effective as of December 30, 2023 to provide reasonable \nassurance that information required to be disclosed by the Company in reports that it files or submits under the Exchange Act is \n(i) recorded, processed, summarized and reported within the time periods specified in the SEC rules and forms and \n(ii) accumulated and communicated to the Company\'s management, including its principal executive officer and principal \nfinancial officer, as appropriate to allow timely decisions regarding required disclosure. \n\nChanges in Internal Control over Financial Reporting \nThere were no changes in the Company\'s internal control over financial reporting during the first quarter of 2024, which were \nidentified in connection with management\'s evaluation required by paragraph (d) of Rules 13a-15 and 15d-15 under the \nExchange Act, that have materially affected, or are reasonably likely to materially affect, the Company\'s internal control over \nfinancial reporting. \n\n Apple Inc. I Q1 2024 Form 10-Q | 18 \n<<<\n\n\nPART II - OTHER INFORMATION \n\nItem 1. Legal Proceedings \n\nEpic Games \nEpic Games, Inc. ("Epic") filed a lawsuit in the U.S. District Court for the Northern District of California (the "District Court") \nagainst the Company alleging violations of federal and state antitrust laws and California\'s unfair competition law based upon the \nCompany\'s operation of its App Store®. On September 10, 2021, the District Court ruled in favor of the Company with respect to \nnine out of the ten counts included in Epic\'s claim. The District Court found that certain provisions of the Company\'s App Store \nReview Guidelines violate California\'s unfair competition law and issued an injunction enjoining the Company from prohibiting \ndevelopers from including in their apps external links that direct customers to purchasing mechanisms other than Apple in-app \npurchasing. The injunction applies to apps on the U.S. storefront of the iOS and iPadOS® App Store. On April 24, 2023, the U.S. \nCourt of Appeals for the Ninth Circuit (the "Circuit Court") affirmed the District Court\'s ruling. 
On June 7, 2023, the Company and \nEpic filed petitions with the Circuit Court requesting further review of the decision. On June 30, 2023, the Circuit Court denied \nboth petitions. On July 17, 2023, the Circuit Court granted Apple\'s motion to stay enforcement of the injunction pending appeal to \nthe U.S. Supreme Court (the "Supreme Court"). On January 16, 2024, the Supreme Court denied both the Company\'s and Epic\'s \npetitions and the stay terminated. The Supreme Court\'s denial of Epic\'s petition confirms the District Court\'s ruling in favor of the \nCompany with respect to all of the antitrust claims. Following termination of the stay, the Company implemented a plan to comply \nwith the injunction and filed a statement of compliance with the District Court. On January 31, 2024, Epic filed a notice with the \nDistrict Court indicating its intent to dispute the Company\'s compliance plan. \n\nMasimo \nMasimo Corporation and Cercacor Laboratories, Inc. (together, "Masimo") filed a complaint before the U.S. International Trade \nCommission (the "ITC") alleging infringement by the Company of five patents relating to the functionality of the blood oxygen \nfeature in Apple Watch® Series 6 and 7. In its complaint, Masimo sought a permanent exclusion order prohibiting importation to \nthe U.S. of certain Apple Watch models that include blood oxygen sensing functionality. On October 26, 2023, the ITC entered a \nlimited exclusion order (the "Order") prohibiting importation and sales in the U.S. of Apple Watch models with blood oxygen \nsensing functionality, which includes Apple Watch Series 9 and Apple Watch Ultra™ 2. The Company subsequently proposed a \nredesign of Apple Watch Series 9 and Apple Watch Ultra 2 to the U.S. Customs and Border Protection (the "CBP") and appealed \nthe Order. On January 12, 2024, the CBP found that the Company\'s proposed redesign of Apple Watch Series 9 and Apple \nWatch Ultra 2 falls outside the scope of the Order, permitting the Company to import and sell the models in the U.S. \n\nOther Legal Proceedings \nThe Company is subject to other legal proceedings and claims that have not been fully resolved and that have arisen in the \nordinary course of business. The Company settled certain matters during the first quarter of 2024 that did not individually or in \nthe aggregate have a material impact on the Company\'s financial condition or operating results. The outcome of litigation is \ninherently uncertain. If one or more legal matters were resolved against the Company in a reporting period for amounts above \nmanagement\'s expectations, the Company\'s financial condition and operating results for that reporting period could be materially \nadversely affected. \n\nItem 1A. Risk Factors \n\nThe Company\'s business, reputation, results of operations, financial condition and stock price can be affected by a number of \nfactors, whether currently known or unknown, including those described in Part I, Item 1A of the 2023 Form 10-K under the \nheading "Risk Factors." When any one or more of these risks materialize from time to time, the Company\'s business, reputation, \nresults of operations, financial condition and stock price can be materially and adversely affected. Except as set forth below, \nthere have been no material changes to the Company\'s risk factors since the 2023 Form 10-K. 
\n\nThe technology industry, including, in some instances, the Company, is subject to intense media, political and regulatory \nscrutiny, which exposes the Company to increasing regulation, government investigations, legal actions and penalties. \nFrom time to time, the Company has made changes to its App Store, including actions taken in response to litigation, \ncompetition, market conditions and legal and regulatory requirements. The Company expects to make further business changes \nin the future. For example, in the U.S. the Company has implemented changes to how developers communicate with consumers \nwithin apps on the U.S. storefront of the iOS and iPadOS App Store regarding alternative purchasing mechanisms. \n\n Apple Inc. I Q1 2024 Form 10-Q | 19 \n<<<\n\n\n In January 2024, the Company announced changes to iOS, the App Store and Safari® in the European Union to comply with the \n Digital Markets Act (the "DMA"), including new business terms and alternative fee structures for iOS apps, alternative methods of \n distribution for iOS apps, alternative payment processing for apps across the Company\'s operating systems, and additional tools \n and application programming interfaces ("APIs") for developers. Although the Company\'s compliance plan is intended to address \n the DMA\'s obligations, it is still subject to potential challenge by the European Commission or private litigants. In addition, other \n jurisdictions may seek to require the Company to make changes to its business. While the changes introduced by the Company \n in the European Union are intended to reduce new privacy and security risks the DMA poses to European Union users, many \n risks will remain. \n\n The Company is also currently subject to antitrust investigations in various jurisdictions around the world, which can result in \n legal proceedings and claims against the Company that could, individually or in the aggregate, have a materially adverse impact \n on the Company\'s business, results of operations and financial condition. For example, the Company is the subject of \n investigations in Europe and other jurisdictions relating to App Store terms and conditions. If such investigations result in adverse \nfindings against the Company, the Company could be exposed to significant fines and may be required to make further changes \n to its App Store business, all of which could materially adversely affect the Company\'s business, results of operations and \n financial condition. \n\n Further, the Company has commercial relationships with other companies in the technology industry that are or may become \n subject to investigations and litigation that, if resolved against those other companies, could materially adversely affect the \n Company\'s commercial relationships with those business partners and materially adversely affect the Company\'s business, \n results of operations and financial condition. For example, the Company earns revenue from licensing arrangements with other \n companies to offer their search services on the Company\'s platforms and applications, and certain of these arrangements are \n currently subject to government investigations and legal proceedings. \n\n There can be no assurance the Company\'s business will not be materially adversely affected, individually or in the aggregate, by \nthe outcomes of such investigations, litigation or changes to laws and regulations in the future. 
Changes to the Company\'s \n business practices to comply with new laws and regulations or in connection with other legal proceedings can negatively impact \n the reputation of the Company\'s products for privacy and security and otherwise adversely affect the experience for users of the \n Company\'s products and services, and result in harm to the Company\'s reputation, loss of competitive advantage, poor market \n acceptance, reduced demand for products and services, and lost sales. \n\n Item 2. Unregistered Sales of Equity Securities and Use of Proceeds \n\n Purchases of Equity Securities by the Issuer and Affiliated Purchasers \n Share repurchase activity during the three months ended December 30, 2023 was as follows (in millions, except number of \n shares, which are reflected in thousands, and per-share amounts): \n Periods Total Number of Shares Purchased Average Price Paid Per Share Total Number of Shares Purchased as Part of Publicly Announced Plans or Programs Approximate Dollar Value of Shares That May Yet Be Purchased Under the Plans or Programs (1) \n October 1, 2023 to November 4, 2023: \n August 2023 ASRs 6,498 (2) 6,498 \n Open market and privately negotiated purchases 45,970 $ 174.03 45,970 \n\n November 5, 2023 to December 2, 2023: \n Open market and privately negotiated purchases 33,797 $ 187.14 33,797 \n\n December 3, 2023 to December 30, 2023: \n Open market and privately negotiated purchases 31,782 $ 194.29 31,782 \n Total 118,047 $ 53,569 \n\n (1) As of December 30, 2023, the Company was authorized by the Board of Directors to purchase up to $90 billion of the \n Company\'s common stock under a share repurchase program announced on May 4, 2023, of which $36.4 billion had been \n utilized. The program does not obligate the Company to acquire a minimum amount of shares. Under the program, shares \n may be repurchased in privately negotiated or open market transactions, including under plans complying with Rule 10b5-1 \n under the Exchange Act. \n (2) In August 2023, the Company entered into accelerated share repurchase agreements ("ASRs") to purchase up to a total of \n $5.0 billion of the Company\'s common stock. In October 2023, the purchase periods for these ASRs ended and an additional \n 6 million shares were delivered and retired. In total, 29 million shares were delivered under these ASRs at an average \n repurchase price of $174.93 per share. \n\n Apple Inc. I Q1 2024 Form 10-Q | 20 \n<<<\n\n\nItem 3. Defaults Upon Senior Securities \n\nNone. \n\nItem 4. Mine Safety Disclosures \n\nNot applicable. \n\nItem 5. Other Information \n\nInsider Trading Arrangements \nOn November 11, 2023 and November 27, 2023, respectively, Luca Maestri, the Company\'s Senior Vice President and Chief \nFinancial Officer, and Katherine L. Adams, the Company\'s Senior Vice President and General Counsel, each entered into a \ntrading plan intended to satisfy the affirmative defense conditions of Rule 10b5-1(c) under the Exchange Act. The plans provide \nfor the sale of all shares vested during the duration of the plans pursuant to certain equity awards granted to Mr. Maestri and Ms. \nAdams, respectively, excluding any shares withheld by the Company to satisfy income tax withholding and remittance \nobligations. Mr. Maestri\'s plan will expire on December 31, 2024, and Ms. Adams\'s plan will expire on November 1, 2024, subject \nto early termination for certain specified events set forth in the plans. \n\nItem 6. 
Exhibits \n Incorporated by Reference \n\n Exhibit Number Exhibit Description Form Exhibit Filing Date/Period End Date \n 31.1* Rule 13a-14(a) / 15d-14(a) Certification of Chief Executive Officer. \n 31.2* Rule 13a-14(a) / 15d-14(a) Certification of Chief Financial Officer. \n 32.1 ** Section 1350 Certifications of Chief Executive Officer and Chief Financial Officer. \n 101* Inline XBRL Document Set for the condensed consolidated financial statements \n and accompanying notes in Part I, Item 1, "Financial Statements" of this \n Quarterly Report on Form 10-Q. \n 104* Inline XBRL for the cover page of this Quarterly Report on Form 10-Q, included in the Exhibit 101 Inline XBRL Document Set. \n\n * Filed herewith. \n ** Furnished herewith. \n\n Apple Inc. I Q1 2024 Form 10-Q | 21 \n<<<\n\n\n SIGNATURE \n\n Pursuant to the requirements of the Securities Exchange Act of 1934, the Registrant has duly caused this report to be signed on \nits behalf by the undersigned thereunto duly authorized. \n\n Date: February 1, 2024 Apple Inc. \n\n By: /s/ Luca Maestri \n Luca Maestri \n Senior Vice President, \n Chief Financial Officer \n\n Apple Inc. I Q1 2024 Form 10-Q | 22 \n<<<\n\n\n Exhibit 31.1 \n\n CERTIFICATION \n\nI, Timothy D. Cook, certify that: \n\n1. I have reviewed this quarterly report on Form 10-Q of Apple Inc .; \n\n2. Based on my knowledge, this report does not contain any untrue statement of a material fact or omit to state a material fact \n necessary to make the statements made, in light of the circumstances under which such statements were made, not \n misleading with respect to the period covered by this report; \n\n3. Based on my knowledge, the financial statements, and other financial information included in this report, fairly present in all \n material respects the financial condition, results of operations and cash flows of the Registrant as of, and for, the periods \n presented in this report; \n\n4. 
The Registrant\'s other certifying officer(s) and I are responsible for establishing and maintaining disclosure controls and \n procedures (as defined in Exchange Act Rules 13a-15(e) and 15d-15(e)) and internal control over financial reporting (as \n defined in Exchange Act Rules 13a-15(f) and 15d-15(f)) for the Registrant and have: \n\n (a) Designed such disclosure controls and procedures, or caused such disclosure controls and procedures to be \n designed under our supervision, to ensure that material information relating to the Registrant, including its \n consolidated subsidiaries, is made known to us by others within those entities, particularly during the period in \n which this report is being prepared; \n\n (b) Designed such internal control over financial reporting, or caused such internal control over financial reporting \n to be designed under our supervision, to provide reasonable assurance regarding the reliability of financial \n reporting and the preparation of financial statements for external purposes in accordance with generally \n accepted accounting principles; \n\n (c) Evaluated the effectiveness of the Registrant\'s disclosure controls and procedures and presented in this report \n our conclusions about the effectiveness of the disclosure controls and procedures, as of the end of the period \n covered by this report based on such evaluation; and \n\n (d) Disclosed in this report any change in the Registrant\'s internal control over financial reporting that occurred \n during the Registrant\'s most recent fiscal quarter (the Registrant\'s fourth fiscal quarter in the case of an annual \n report) that has materially affected, or is reasonably likely to materially affect, the Registrant\'s internal control \n over financial reporting; and \n\n5. The Registrant\'s other certifying officer(s) and I have disclosed, based on our most recent evaluation of internal control over \n financial reporting, to the Registrant\'s auditors and the audit committee of the Registrant\'s board of directors (or persons \n performing the equivalent functions): \n\n (a) All significant deficiencies and material weaknesses in the design or operation of internal control over financial \n reporting which are reasonably likely to adversely affect the Registrant\'s ability to record, process, summarize \n and report financial information; and \n\n (b) Any fraud, whether or not material, that involves management or other employees who have a significant role \n in the Registrant\'s internal control over financial reporting. \n\nDate: February 1, 2024 \n\n By: /s/ Timothy D. Cook \n Timothy D. Cook \n Chief Executive Officer \n<<<\n\n\n Exhibit 31.2 \n\n CERTIFICATION \n\nI, Luca Maestri, certify that: \n\n1. I have reviewed this quarterly report on Form 10-Q of Apple Inc .; \n\n2. Based on my knowledge, this report does not contain any untrue statement of a material fact or omit to state a material fact \n necessary to make the statements made, in light of the circumstances under which such statements were made, not \n misleading with respect to the period covered by this report; \n\n3. Based on my knowledge, the financial statements, and other financial information included in this report, fairly present in all \n material respects the financial condition, results of operations and cash flows of the Registrant as of, and for, the periods \n presented in this report; \n\n4. 
The Registrant\'s other certifying officer(s) and I are responsible for establishing and maintaining disclosure controls and \n procedures (as defined in Exchange Act Rules 13a-15(e) and 15d-15(e)) and internal control over financial reporting (as \n defined in Exchange Act Rules 13a-15(f) and 15d-15(f)) for the Registrant and have: \n\n (a) Designed such disclosure controls and procedures, or caused such disclosure controls and procedures to be \n designed under our supervision, to ensure that material information relating to the Registrant, including its \n consolidated subsidiaries, is made known to us by others within those entities, particularly during the period in \n which this report is being prepared; \n\n (b) Designed such internal control over financial reporting, or caused such internal control over financial reporting \n to be designed under our supervision, to provide reasonable assurance regarding the reliability of financial \n reporting and the preparation of financial statements for external purposes in accordance with generally \n accepted accounting principles; \n\n (c) Evaluated the effectiveness of the Registrant\'s disclosure controls and procedures and presented in this report \n our conclusions about the effectiveness of the disclosure controls and procedures, as of the end of the period \n covered by this report based on such evaluation; and \n\n (d) Disclosed in this report any change in the Registrant\'s internal control over financial reporting that occurred \n during the Registrant\'s most recent fiscal quarter (the Registrant\'s fourth fiscal quarter in the case of an annual \n report) that has materially affected, or is reasonably likely to materially affect, the Registrant\'s internal control \n over financial reporting; and \n\n5. The Registrant\'s other certifying officer(s) and I have disclosed, based on our most recent evaluation of internal control over \n financial reporting, to the Registrant\'s auditors and the audit committee of the Registrant\'s board of directors (or persons \n performing the equivalent functions): \n\n (a) All significant deficiencies and material weaknesses in the design or operation of internal control over financial \n reporting which are reasonably likely to adversely affect the Registrant\'s ability to record, process, summarize \n and report financial information; and \n\n (b) Any fraud, whether or not material, that involves management or other employees who have a significant role \n in the Registrant\'s internal control over financial reporting. \n\nDate: February 1, 2024 \n\n By: /s/ Luca Maestri \n Luca Maestri \n Senior Vice President, \n Chief Financial Officer \n<<<\n\n\n Exhibit 32.1 \n\n CERTIFICATIONS OF CHIEF EXECUTIVE OFFICER AND CHIEF FINANCIAL OFFICER \n PURSUANT TO \n 18 U.S.C. SECTION 1350, \n AS ADOPTED PURSUANT TO \n SECTION 906 OF THE SARBANES-OXLEY ACT OF 2002 \n\nI, Timothy D. Cook, certify, as of the date hereof, pursuant to 18 U.S.C. Section 1350, as adopted pursuant to Section 906 of the \nSarbanes-Oxley Act of 2002, that the Quarterly Report of Apple Inc. on Form 10-Q for the period ended December 30, 2023 fully \ncomplies with the requirements of Section 13(a) or 15(d) of the Securities Exchange Act of 1934 and that information contained \nin such Form 10-Q fairly presents in all material respects the financial condition and results of operations of Apple Inc. at the \ndates and for the periods indicated. \n\nDate: February 1, 2024 \n\n By: /s/ Timothy D. Cook \n Timothy D. 
Cook \n Chief Executive Officer \n\nI, Luca Maestri, certify, as of the date hereof, pursuant to 18 U.S.C. Section 1350, as adopted pursuant to Section 906 of the \nSarbanes-Oxley Act of 2002, that the Quarterly Report of Apple Inc. on Form 10-Q for the period ended December 30, 2023 fully \ncomplies with the requirements of Section 13(a) or 15(d) of the Securities Exchange Act of 1934 and that information contained \nin such Form 10-Q fairly presents in all material respects the financial condition and results of operations of Apple Inc. at the \ndates and for the periods indicated. \n\nDate: February 1, 2024 \n\n By: /s/ Luca Maestri \n Luca Maestri \n Senior Vice President, \n Chief Financial Officer \n\nA signed original of this written statement required by Section 906 has been provided to Apple Inc. and will be retained by Apple \nInc. and furnished to the Securities and Exchange Commission or its staff upon request. \n<<<\n" diff --git a/backend/workflow_manager/endpoint/tests/test_database_utils/test_create_table_if_not_exists.py b/backend/workflow_manager/endpoint/tests/test_database_utils/test_create_table_if_not_exists.py deleted file mode 100644 index 90d82a357..000000000 --- a/backend/workflow_manager/endpoint/tests/test_database_utils/test_create_table_if_not_exists.py +++ /dev/null @@ -1,97 +0,0 @@ -import pytest # type: ignore -from workflow_manager.endpoint.database_utils import DatabaseUtils -from workflow_manager.endpoint.exceptions import UnstractDBException - -from unstract.connectors.databases.unstract_db import UnstractDB - -from .base_test_db import BaseTestDB - - -class TestCreateTableIfNotExists(BaseTestDB): - def test_create_table_if_not_exists_valid( - self, valid_dbs_instance: UnstractDB - ) -> None: - engine = valid_dbs_instance.get_engine() - result = DatabaseUtils.create_table_if_not_exists( - db_class=valid_dbs_instance, - engine=engine, - table_name=self.valid_table_name, - database_entry=self.database_entry, - ) - assert result is None - - def test_create_table_if_not_exists_bigquery_valid( - self, valid_bigquery_db_instance: UnstractDB - ) -> None: - engine = valid_bigquery_db_instance.get_engine() - result = DatabaseUtils.create_table_if_not_exists( - db_class=valid_bigquery_db_instance, - engine=engine, - table_name=self.valid_bigquery_table_name, - database_entry=self.database_entry, - ) - assert result is None - - def test_create_table_if_not_exists_invalid_schema( - self, invalid_dbs_instance: UnstractDB - ) -> None: - engine = invalid_dbs_instance.get_engine() - with pytest.raises(UnstractDBException): - DatabaseUtils.create_table_if_not_exists( - db_class=invalid_dbs_instance, - engine=engine, - table_name=self.valid_table_name, - database_entry=self.database_entry, - ) - - def test_create_table_if_not_exists_invalid_syntax( - self, valid_dbs_instance: UnstractDB - ) -> None: - engine = valid_dbs_instance.get_engine() - with pytest.raises(UnstractDBException): - DatabaseUtils.create_table_if_not_exists( - db_class=valid_dbs_instance, - engine=engine, - table_name=self.invalid_syntax_table_name, - database_entry=self.database_entry, - ) - - def test_create_table_if_not_exists_wrong_table_name( - self, valid_dbs_instance: UnstractDB - ) -> None: - engine = valid_dbs_instance.get_engine() - with pytest.raises(UnstractDBException): - DatabaseUtils.create_table_if_not_exists( - db_class=valid_dbs_instance, - engine=engine, - table_name=self.invalid_wrong_table_name, - database_entry=self.database_entry, - ) - - def 
test_create_table_if_not_exists_feature_not_supported( - self, invalid_dbs_instance: UnstractDB - ) -> None: - engine = invalid_dbs_instance.get_engine() - with pytest.raises(UnstractDBException): - DatabaseUtils.create_table_if_not_exists( - db_class=invalid_dbs_instance, - engine=engine, - table_name=self.invalid_wrong_table_name, - database_entry=self.database_entry, - ) - - def test_create_table_if_not_exists_invalid_snowflake_db( - self, invalid_snowflake_db_instance: UnstractDB - ) -> None: - engine = invalid_snowflake_db_instance.get_engine() - with pytest.raises(UnstractDBException): - DatabaseUtils.create_table_if_not_exists( - db_class=invalid_snowflake_db_instance, - engine=engine, - table_name=self.invalid_wrong_table_name, - database_entry=self.database_entry, - ) - - -if __name__ == "__main__": - pytest.main() diff --git a/backend/workflow_manager/endpoint/tests/test_database_utils/test_execute_write_query.py b/backend/workflow_manager/endpoint/tests/test_database_utils/test_execute_write_query.py deleted file mode 100644 index c2861581d..000000000 --- a/backend/workflow_manager/endpoint/tests/test_database_utils/test_execute_write_query.py +++ /dev/null @@ -1,169 +0,0 @@ -import os -import uuid -from typing import Any - -import pytest # type: ignore -from workflow_manager.endpoint.database_utils import DatabaseUtils -from workflow_manager.endpoint.exceptions import UnstractDBException - -from unstract.connectors.databases.redshift import Redshift -from unstract.connectors.databases.unstract_db import UnstractDB - -from .base_test_db import BaseTestDB - - -class TestExecuteWriteQuery(BaseTestDB): - @pytest.fixture(autouse=True) - def setup(self, base_setup: Any) -> None: - self.sql_columns_and_values = { - "created_by": "Unstract/DBWriter", - "created_at": "2024-05-20 10:36:25.362609", - "data": '{"input_file": "simple.pdf", "result": "report"}', - "id": str(uuid.uuid4()), - } - - def test_execute_write_query_valid(self, valid_dbs_instance: Any) -> None: - engine = valid_dbs_instance.get_engine() - result = DatabaseUtils.execute_write_query( - db_class=valid_dbs_instance, - engine=engine, - table_name=self.valid_table_name, - sql_keys=list(self.sql_columns_and_values.keys()), - sql_values=list(self.sql_columns_and_values.values()), - ) - assert result is None - - def test_execute_write_query_invalid_schema( - self, invalid_dbs_instance: Any - ) -> None: - engine = invalid_dbs_instance.get_engine() - with pytest.raises(UnstractDBException): - DatabaseUtils.execute_write_query( - db_class=invalid_dbs_instance, - engine=engine, - table_name=self.valid_table_name, - sql_keys=list(self.sql_columns_and_values.keys()), - sql_values=list(self.sql_columns_and_values.values()), - ) - - def test_execute_write_query_invalid_syntax(self, valid_dbs_instance: Any) -> None: - engine = valid_dbs_instance.get_engine() - with pytest.raises(UnstractDBException): - DatabaseUtils.execute_write_query( - db_class=valid_dbs_instance, - engine=engine, - table_name=self.invalid_syntax_table_name, - sql_keys=list(self.sql_columns_and_values.keys()), - sql_values=list(self.sql_columns_and_values.values()), - ) - - def test_execute_write_query_feature_not_supported( - self, invalid_dbs_instance: Any - ) -> None: - engine = invalid_dbs_instance.get_engine() - with pytest.raises(UnstractDBException): - DatabaseUtils.execute_write_query( - db_class=invalid_dbs_instance, - engine=engine, - table_name=self.invalid_wrong_table_name, - sql_keys=list(self.sql_columns_and_values.keys()), - 
sql_values=list(self.sql_columns_and_values.values()), - ) - - def load_text_to_sql_values(self) -> dict[str, Any]: - file_path = os.path.join(os.path.dirname(__file__), "static", "large_doc.txt") - with open(file_path, encoding="utf-8") as file: - content = file.read() - sql_columns_and_values = self.sql_columns_and_values.copy() - sql_columns_and_values["data"] = content - return sql_columns_and_values - - @pytest.fixture - def valid_redshift_db_instance(self) -> Any: - return Redshift(self.valid_redshift_creds) - - def test_execute_write_query_datatype_too_large_redshift( - self, valid_redshift_db_instance: Any - ) -> None: - engine = valid_redshift_db_instance.get_engine() - sql_columns_and_values = self.load_text_to_sql_values() - with pytest.raises(UnstractDBException): - DatabaseUtils.execute_write_query( - db_class=valid_redshift_db_instance, - engine=engine, - table_name=self.valid_table_name, - sql_keys=list(sql_columns_and_values.keys()), - sql_values=list(sql_columns_and_values.values()), - ) - - def test_execute_write_query_bigquery_valid( - self, valid_bigquery_db_instance: Any - ) -> None: - engine = valid_bigquery_db_instance.get_engine() - result = DatabaseUtils.execute_write_query( - db_class=valid_bigquery_db_instance, - engine=engine, - table_name=self.valid_bigquery_table_name, - sql_keys=list(self.sql_columns_and_values.keys()), - sql_values=list(self.sql_columns_and_values.values()), - ) - assert result is None - - def test_execute_write_query_wrong_table_name( - self, valid_dbs_instance: UnstractDB - ) -> None: - engine = valid_dbs_instance.get_engine() - with pytest.raises(UnstractDBException): - DatabaseUtils.execute_write_query( - db_class=valid_dbs_instance, - engine=engine, - table_name=self.invalid_wrong_table_name, - sql_keys=list(self.sql_columns_and_values.keys()), - sql_values=list(self.sql_columns_and_values.values()), - ) - - def test_execute_write_query_bigquery_large_doc( - self, valid_bigquery_db_instance: Any - ) -> None: - engine = valid_bigquery_db_instance.get_engine() - sql_columns_and_values = self.load_text_to_sql_values() - result = DatabaseUtils.execute_write_query( - db_class=valid_bigquery_db_instance, - engine=engine, - table_name=self.valid_bigquery_table_name, - sql_keys=list(sql_columns_and_values.keys()), - sql_values=list(sql_columns_and_values.values()), - ) - assert result is None - - def test_execute_write_query_invalid_snowflake_db( - self, invalid_snowflake_db_instance: UnstractDB - ) -> None: - engine = invalid_snowflake_db_instance.get_engine() - with pytest.raises(UnstractDBException): - DatabaseUtils.execute_write_query( - db_class=invalid_snowflake_db_instance, - engine=engine, - table_name=self.invalid_wrong_table_name, - sql_keys=list(self.sql_columns_and_values.keys()), - sql_values=list(self.sql_columns_and_values.values()), - ) - - # Make this function at last to cover all large doc - def test_execute_write_query_large_doc( - self, valid_dbs_instance_to_handle_large_doc: Any - ) -> None: - engine = valid_dbs_instance_to_handle_large_doc.get_engine() - sql_columns_and_values = self.load_text_to_sql_values() - result = DatabaseUtils.execute_write_query( - db_class=valid_dbs_instance_to_handle_large_doc, - engine=engine, - table_name=self.valid_table_name, - sql_keys=list(sql_columns_and_values.keys()), - sql_values=list(sql_columns_and_values.values()), - ) - assert result is None - - -if __name__ == "__main__": - pytest.main() diff --git a/backend/workflow_manager/endpoint/urls.py 
b/backend/workflow_manager/endpoint/urls.py deleted file mode 100644 index 1b658b27c..000000000 --- a/backend/workflow_manager/endpoint/urls.py +++ /dev/null @@ -1,23 +0,0 @@ -from django.urls import path -from workflow_manager.endpoint.views import WorkflowEndpointViewSet - -workflow_endpoint_list = WorkflowEndpointViewSet.as_view( - {"get": "workflow_endpoint_list"} -) -endpoint_list = WorkflowEndpointViewSet.as_view({"get": "list"}) -workflow_endpoint_detail = WorkflowEndpointViewSet.as_view( - {"get": "retrieve", "put": "update"} -) -endpoint_settings_detail = WorkflowEndpointViewSet.as_view( - {"get": WorkflowEndpointViewSet.get_settings.__name__} -) - -urlpatterns = [ - path("", endpoint_list, name="endpoint-list"), - path("/", workflow_endpoint_detail, name="workflow-endpoint-detail"), - path( - "/settings/", - endpoint_settings_detail, - name="workflow-endpoint-detail", - ), -] diff --git a/backend/workflow_manager/endpoint/views.py b/backend/workflow_manager/endpoint/views.py deleted file mode 100644 index 50375d97f..000000000 --- a/backend/workflow_manager/endpoint/views.py +++ /dev/null @@ -1,82 +0,0 @@ -from django.db.models import QuerySet -from rest_framework import status, viewsets -from rest_framework.decorators import action -from rest_framework.request import Request -from rest_framework.response import Response -from workflow_manager.endpoint.destination import DestinationConnector -from workflow_manager.endpoint.endpoint_utils import WorkflowEndpointUtils -from workflow_manager.endpoint.models import WorkflowEndpoint -from workflow_manager.endpoint.source import SourceConnector -from workflow_manager.workflow.serializers import WorkflowEndpointSerializer - - -class WorkflowEndpointViewSet(viewsets.ModelViewSet): - queryset = WorkflowEndpoint.objects.all() - serializer_class = WorkflowEndpointSerializer - - def get_queryset(self) -> QuerySet: - - queryset = ( - WorkflowEndpoint.objects.all() - .select_related("workflow") - .filter(workflow__created_by=self.request.user) - ) - endpoint_type_filter = self.request.query_params.get("endpoint_type", None) - connection_type_filter = self.request.query_params.get("connection_type", None) - if endpoint_type_filter: - queryset = queryset.filter(endpoint_type=endpoint_type_filter) - if connection_type_filter: - queryset = queryset.filter(connection_type=connection_type_filter) - return queryset - - @action(detail=True, methods=["get"]) - def get_settings(self, request: Request, pk: str) -> Response: - """Retrieve the settings/schema for a specific workflow endpoint. - - Parameters: - request (Request): The HTTP request object. - pk (str): The primary key of the workflow endpoint. - - Returns: - Response: The HTTP response containing the settings/schema for - the endpoint. 
- """ - endpoint: WorkflowEndpoint = self.get_object() - connection_type = endpoint.connection_type - endpoint_type = endpoint.endpoint_type - schema = None - if endpoint_type == WorkflowEndpoint.EndpointType.SOURCE: - if connection_type == WorkflowEndpoint.ConnectionType.API: - schema = SourceConnector.get_json_schema_for_api() - if connection_type == WorkflowEndpoint.ConnectionType.FILESYSTEM: - schema = SourceConnector.get_json_schema_for_file_system() - if endpoint_type == WorkflowEndpoint.EndpointType.DESTINATION: - if connection_type == WorkflowEndpoint.ConnectionType.DATABASE: - schema = DestinationConnector.get_json_schema_for_database() - if connection_type == WorkflowEndpoint.ConnectionType.FILESYSTEM: - schema = DestinationConnector.get_json_schema_for_file_system() - if connection_type == WorkflowEndpoint.ConnectionType.API: - schema = DestinationConnector.get_json_schema_for_api() - - return Response( - { - "status": status.HTTP_200_OK, - "schema": schema, - } - ) - - @action(detail=True, methods=["get"]) - def workflow_endpoint_list(self, request: Request, pk: str) -> Response: - """Retrieve a list of endpoints for a specific workflow. - - Parameters: - request (Request): The HTTP request object. - pk (str): The primary key of the workflow. - - Returns: - Response: The HTTP response containing the serialized list of - endpoints. - """ - endpoints = WorkflowEndpointUtils.get_endpoints_for_workflow(pk) - serializer = WorkflowEndpointSerializer(endpoints, many=True) - return Response(serializer.data) diff --git a/backend/workflow_manager/endpoint_v2/database_utils.py b/backend/workflow_manager/endpoint_v2/database_utils.py index 0f8a9fef4..e6b3a32d4 100644 --- a/backend/workflow_manager/endpoint_v2/database_utils.py +++ b/backend/workflow_manager/endpoint_v2/database_utils.py @@ -5,9 +5,9 @@ from typing import Any, Optional from utils.constants import Common -from workflow_manager.endpoint.constants import DBConnectionClass, TableColumns -from workflow_manager.endpoint.exceptions import UnstractDBException -from workflow_manager.workflow.enums import AgentName, ColumnModes +from workflow_manager.endpoint_v2.constants import DBConnectionClass, TableColumns +from workflow_manager.endpoint_v2.exceptions import UnstractDBException +from workflow_manager.workflow_v2.enums import AgentName, ColumnModes from unstract.connectors.databases import connectors as db_connectors from unstract.connectors.databases.exceptions import UnstractDBConnectorException diff --git a/backend/workflow_manager/workflow/__init__.py b/backend/workflow_manager/workflow/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/backend/workflow_manager/workflow/admin.py b/backend/workflow_manager/workflow/admin.py deleted file mode 100644 index 705599e49..000000000 --- a/backend/workflow_manager/workflow/admin.py +++ /dev/null @@ -1,4 +0,0 @@ -from django.contrib import admin -from workflow_manager.workflow.models.workflow import Workflow - -admin.site.register(Workflow) diff --git a/backend/workflow_manager/workflow/apps.py b/backend/workflow_manager/workflow/apps.py deleted file mode 100644 index 3c0d821d7..000000000 --- a/backend/workflow_manager/workflow/apps.py +++ /dev/null @@ -1,13 +0,0 @@ -from django.apps import AppConfig - - -class WorkflowConfig(AppConfig): - default_auto_field = "django.db.models.BigAutoField" - name = "workflow_manager.workflow" - - def ready(self): - from workflow_manager.workflow.execution_log_utils import ( - create_log_consumer_scheduler_if_not_exists, - ) - 
- create_log_consumer_scheduler_if_not_exists() diff --git a/backend/workflow_manager/workflow/constants.py b/backend/workflow_manager/workflow/constants.py deleted file mode 100644 index a1cf08b43..000000000 --- a/backend/workflow_manager/workflow/constants.py +++ /dev/null @@ -1,60 +0,0 @@ -class WorkflowKey: - """Dict keys related to workflows.""" - - LLM_RESPONSE = "llm_response" - WF_STEPS = "steps" - WF_TOOL = "tool" - WF_INSTANCE_SETTINGS = "instance_settings" - WF_TOOL_INSTANCE_ID = "tool_instance_id" - WF_CONNECTOR_CLASS = "connector_class" - WF_INPUT = "input" - WF_OUTPUT = "output" - WF_TOOL_UUID = "id" - WF_ID = "workflow_id" - WF_NAME = "workflow_name" - WF_OWNER = "workflow_owner" - WF_TOOL_INSTANCES = "tool_instances" - WF_IS_ACTIVE = "is_active" - EXECUTION_ACTION = "execution_action" - # Keys from provisional workflow - PWF_RESULT = "result" - PWF_OUTPUT = "output" - PWF_COST_TYPE = "cost_type" - PWF_COST = "cost" - PWF_TIME_TAKEN = "time_taken" - WF_CACHE_PATTERN = r"^cache:{?\w{8}-?\w{4}-?\w{4}-?\w{4}-?\w{12}}?$" - WF_PROJECT_GUID = "guid" - - -class WorkflowExecutionKey: - WORKFLOW_EXECUTION_ID_PREFIX = "workflow" - EXECUTION_ID = "execution_id" - LOG_GUID = "log_guid" - WITH_LOG = "with_log" - - -class WorkflowErrors: - WORKFLOW_EXISTS = "Workflow with this configuration already exists." - DUPLICATE_API = "It appears that a duplicate call may have been made." - INVALID_EXECUTION_ID = "Invalid execution_id" - - -class CeleryConfigurations: - INTERVAL = 2 - - -class Tool: - APIOPS = "apiops" - - -class WorkflowMessages: - CACHE_CLEAR_SUCCESS = "Cache cleared successfully." - CACHE_CLEAR_FAILED = "Failed to clear cache." - CACHE_EMPTY = "Cache is already empty." - CELERY_TIMEOUT_MESSAGE = ( - "Your request is being processed. Please wait." - "You can check the status using the status API." - ) - FILE_MARKER_CLEAR_SUCCESS = "File marker cleared successfully." - FILE_MARKER_CLEAR_FAILED = "Failed to clear file marker." - WORKFLOW_EXECUTION_NOT_FOUND = "Workflow execution not found." 
diff --git a/backend/workflow_manager/workflow/dto.py b/backend/workflow_manager/workflow/dto.py deleted file mode 100644 index 66ea68976..000000000 --- a/backend/workflow_manager/workflow/dto.py +++ /dev/null @@ -1,120 +0,0 @@ -from dataclasses import dataclass -from typing import Any, Optional - -from celery.result import AsyncResult -from workflow_manager.workflow.constants import WorkflowKey - - -@dataclass -class ProvisionalWorkflow: - result: str - output: dict[str, str] - cost_type: str - cost: str - time_taken: float - - def __init__(self, input_dict: dict[str, Any]) -> None: - self.result = input_dict.get(WorkflowKey.PWF_RESULT, "") - self.output = input_dict.get(WorkflowKey.PWF_OUTPUT, {}) - self.cost_type = input_dict.get(WorkflowKey.PWF_COST_TYPE, "") - self.cost = input_dict.get(WorkflowKey.PWF_COST, "") - self.time_taken = input_dict.get(WorkflowKey.PWF_TIME_TAKEN, 0.0) - - -@dataclass -class ExecutionResponse: - workflow_id: str - execution_id: str - execution_status: str - log_id: Optional[str] = None - status_api: Optional[str] = None - error: Optional[str] = None - mode: Optional[str] = None - result: Optional[Any] = None - message: Optional[str] = None - - def __post_init__(self) -> None: - self.log_id = self.log_id or None - self.mode = self.mode or None - self.error = self.error or None - self.result = self.result or None - self.message = self.message or None - self.status_api = self.status_api or None - - def remove_result_metadata_keys(self, keys_to_remove: list[str] = []) -> None: - """Removes specified keys from the 'metadata' dictionary within each - 'result' dictionary in the 'result' list attribute of the instance. If - 'keys_to_remove' is empty, the 'metadata' key itself is removed. - - Args: - keys_to_remove (List[str]): List of keys to be removed from 'metadata'. - """ - if not isinstance(self.result, list): - return - - for item in self.result: - if not isinstance(item, dict): - break - - result = item.get("result") - if not isinstance(result, dict): - break - - self._remove_specific_keys(result=result, keys_to_remove=keys_to_remove) - - def _remove_specific_keys(self, result: dict, keys_to_remove: list[str]) -> None: - """Removes specified keys from the 'metadata' dictionary within the - provided 'result' dictionary. If 'keys_to_remove' is empty, the - 'metadata' dictionary is cleared. - - Args: - result (dict): The dictionary containing the 'metadata' key. - keys_to_remove (List[str]): List of keys to be removed from 'metadata'. - """ - metadata = result.get("metadata", {}) - if keys_to_remove: - for key in keys_to_remove: - metadata.pop(key, None) - else: - metadata = {} - self._update_metadata(result=result, metadata=metadata) - - def _update_metadata(self, result: dict, metadata: dict) -> None: - """Updates the 'metadata' key in the provided 'result' dictionary. If - 'metadata' is empty, removes the 'metadata' key from 'result'. - - Args: - result (dict): The dictionary to be updated. - metadata (dict): The new metadata dictionary to be set. If empty, 'metadata' - is removed. 
- """ - if metadata: - result["metadata"] = metadata - else: - result.pop("metadata", None) - - -@dataclass -class AsyncResultData: - id: str - status: str - result: Any - is_ready: bool - is_failed: bool - info: Any - - def __init__(self, async_result: AsyncResult): - self.id = async_result.id - self.status = async_result.status - self.result = async_result.result - self.is_ready = async_result.ready() - self.is_failed = async_result.failed() - self.info = async_result.info - if isinstance(self.result, Exception): - self.result = str(self.result) - - def to_dict(self) -> dict[str, Any]: - return { - "status": self.status, - "result": self.result, - } diff --git a/backend/workflow_manager/workflow/enums.py b/backend/workflow_manager/workflow/enums.py deleted file mode 100644 index cc13a1e35..000000000 --- a/backend/workflow_manager/workflow/enums.py +++ /dev/null @@ -1,73 +0,0 @@ -from enum import Enum - -from utils.common_utils import ModelEnum - - -class WorkflowExecutionMethod(Enum): - INSTANT = "INSTANT" - QUEUED = "QUEUED" - - -class ExecutionStatus(ModelEnum): - """An enumeration representing the various statuses of an execution - process. - - Statuses: - PENDING: The execution's entry has been created in the database. - QUEUED: The execution task is queued for asynchronous execution - INITIATED: The execution has been initiated. - READY: The execution is ready for the build phase. - EXECUTING: The execution is currently in progress. - COMPLETED: The execution has been successfully completed. - STOPPED: The execution was stopped by the user - (applicable to step executions). - ERROR: An error occurred during the execution process. - - Note: - Intermediate statuses might not be experienced due to - Django's query triggering once all processes are completed. - """ - - PENDING = "PENDING" - INITIATED = "INITIATED" - QUEUED = "QUEUED" - READY = "READY" - EXECUTING = "EXECUTING" - COMPLETED = "COMPLETED" - STOPPED = "STOPPED" - ERROR = "ERROR" - - -class SchemaType(Enum): - """Possible types for workflow module's JSON schema. - - Values: - src: Refers to the source module's schema - dest: Refers to the destination module's schema - """ - - SRC = "src" - DEST = "dest" - - -class SchemaEntity(Enum): - """Possible entities for workflow module's JSON schema. - - Values: - file: Refers to schema for file based sources - api: Refers to schema for API based sources - db: Refers to schema for DB based destinations - """ - - FILE = "file" - API = "api" - DB = "db" - - -class ColumnModes(Enum): - WRITE_JSON_TO_A_SINGLE_COLUMN = "Write JSON to a single column" - SPLIT_JSON_INTO_COLUMNS = "Split JSON into columns" - - -class AgentName(Enum): - UNSTRACT_DBWRITER = "Unstract/DBWriter" diff --git a/backend/workflow_manager/workflow/exceptions.py b/backend/workflow_manager/workflow/exceptions.py deleted file mode 100644 index 39087ab8e..000000000 --- a/backend/workflow_manager/workflow/exceptions.py +++ /dev/null @@ -1,56 +0,0 @@ -from rest_framework.exceptions import APIException - - -class WorkflowGenerationError(APIException): - status_code = 500 - default_detail = "Error generating workflow." - - -class WorkflowRegenerationError(APIException): - status_code = 500 - default_detail = "Error regenerating workflow." - - -class WorkflowExecutionError(APIException): - status_code = 500 - default_detail = "Error executing workflow." 
- - -class WorkflowDoesNotExistError(APIException): - status_code = 404 - default_detail = "Workflow does not exist" - - -class TaskDoesNotExistError(APIException): - status_code = 404 - default_detail = "Task does not exist" - - -class DuplicateActionError(APIException): - status_code = 400 - default_detail = "Action is running" - - -class InvalidRequest(APIException): - status_code = 400 - default_detail = "Invalid Request" - - -class MissingEnvException(APIException): - status_code = 500 - default_detail = "At least one active platform key should be available." - - -class InternalException(APIException): - """Internal Error. - - Args: - APIException (_type_): _description_ - """ - - status_code = 500 - - -class WorkflowExecutionNotExist(APIException): - status_code = 404 - default_detail = "Workflow execution does not exist" diff --git a/backend/workflow_manager/workflow/execution.py b/backend/workflow_manager/workflow/execution.py deleted file mode 100644 index e04f0bb8c..000000000 --- a/backend/workflow_manager/workflow/execution.py +++ /dev/null @@ -1,418 +0,0 @@ -import logging -import time -from typing import Optional - -from account.constants import Common -from django.db import connection -from platform_settings.platform_auth_service import PlatformAuthenticationService -from tool_instance.models import ToolInstance -from tool_instance.tool_processor import ToolProcessor -from unstract.tool_registry.dto import Tool -from unstract.workflow_execution import WorkflowExecutionService -from unstract.workflow_execution.dto import ToolInstance as ToolInstanceDataClass -from unstract.workflow_execution.dto import WorkflowDto -from unstract.workflow_execution.enums import ExecutionType, LogComponent, LogState -from unstract.workflow_execution.exceptions import StopExecution -from utils.local_context import StateStore -from workflow_manager.workflow.constants import WorkflowKey -from workflow_manager.workflow.enums import ExecutionStatus -from workflow_manager.workflow.exceptions import WorkflowExecutionError -from workflow_manager.workflow.models import Workflow, WorkflowExecution -from workflow_manager.workflow.models.execution import EXECUTION_ERROR_LENGTH - -logger = logging.getLogger(__name__) - - -class WorkflowExecutionServiceHelper(WorkflowExecutionService): - def __init__( - self, - workflow: Workflow, - tool_instances: list[ToolInstance], - organization_id: Optional[str] = None, - pipeline_id: Optional[str] = None, - single_step: bool = False, - scheduled: bool = False, - mode: tuple[str, str] = WorkflowExecution.Mode.INSTANT, - workflow_execution: Optional[WorkflowExecution] = None, - use_file_history: bool = True, - ) -> None: - tool_instances_as_dto = [] - for tool_instance in tool_instances: - tool_instances_as_dto.append( - self.convert_tool_instance_model_to_data_class(tool_instance) - ) - workflow_as_dto: WorkflowDto = self.convert_workflow_model_to_data_class( - workflow=workflow - ) - organization_id = organization_id or connection.tenant.schema_name - if not organization_id: - raise WorkflowExecutionError(detail="invalid Organization ID") - - platform_key = PlatformAuthenticationService.get_active_platform_key() - super().__init__( - organization_id=organization_id, - workflow_id=workflow.id, - workflow=workflow_as_dto, - tool_instances=tool_instances_as_dto, - platform_service_api_key=str(platform_key.key), - ignore_processed_entities=False, - ) - if not workflow_execution: - # Use pipline_id for pipelines / API deployment - # since session might not be present. 
- log_events_id = StateStore.get(Common.LOG_EVENTS_ID) - self.execution_log_id = log_events_id if log_events_id else pipeline_id - self.execution_mode = mode - self.execution_method: tuple[str, str] = ( - WorkflowExecution.Method.SCHEDULED - if scheduled - else WorkflowExecution.Method.DIRECT - ) - self.execution_type: tuple[str, str] = ( - WorkflowExecution.Type.STEP - if single_step - else WorkflowExecution.Type.COMPLETE - ) - workflow_execution = WorkflowExecution( - pipeline_id=pipeline_id, - workflow_id=workflow.id, - execution_mode=mode, - execution_method=self.execution_method, - execution_type=self.execution_type, - status=ExecutionStatus.INITIATED.value, - execution_log_id=self.execution_log_id, - ) - workflow_execution.save() - else: - self.execution_mode = workflow_execution.execution_mode - self.execution_method = workflow_execution.execution_method - self.execution_type = workflow_execution.execution_type - self.execution_log_id = workflow_execution.execution_log_id - - self.set_messaging_channel(str(self.execution_log_id)) - project_settings = {} - project_settings[WorkflowKey.WF_PROJECT_GUID] = str(self.execution_log_id) - self.workflow_id = workflow.id - self.project_settings = project_settings - self.pipeline_id = pipeline_id - self.execution_id = str(workflow_execution.id) - self.use_file_history = use_file_history - logger.info( - f"Executing for Pipeline ID: {pipeline_id}, " - f"workflow ID: {self.workflow_id}, execution ID: {self.execution_id}, " - f"web socket messaging channel ID: {self.execution_log_id}" - ) - - self.compilation_result = self.compile_workflow(execution_id=self.execution_id) - - @classmethod - def create_workflow_execution( - cls, - workflow_id: str, - pipeline_id: Optional[str] = None, - single_step: bool = False, - scheduled: bool = False, - log_events_id: Optional[str] = None, - execution_id: Optional[str] = None, - mode: tuple[str, str] = WorkflowExecution.Mode.INSTANT, - ) -> WorkflowExecution: - # Validating with existing execution - existing_execution = cls.get_execution_instance_by_id(execution_id) - if existing_execution: - return existing_execution - - execution_method: tuple[str, str] = ( - WorkflowExecution.Method.SCHEDULED - if scheduled - else WorkflowExecution.Method.DIRECT - ) - execution_type: tuple[str, str] = ( - WorkflowExecution.Type.STEP - if single_step - else WorkflowExecution.Type.COMPLETE - ) - execution_log_id = log_events_id if log_events_id else pipeline_id - # TODO: Using objects.create() instead - workflow_execution = WorkflowExecution( - pipeline_id=pipeline_id, - workflow_id=workflow_id, - execution_mode=mode, - execution_method=execution_method, - execution_type=execution_type, - status=ExecutionStatus.PENDING.value, - execution_log_id=execution_log_id, - ) - if execution_id: - workflow_execution.id = execution_id - workflow_execution.save() - return workflow_execution - - def update_execution( - self, - status: Optional[ExecutionStatus] = None, - execution_time: Optional[float] = None, - error: Optional[str] = None, - increment_attempt: bool = False, - ) -> None: - execution = WorkflowExecution.objects.get(pk=self.execution_id) - - if status is not None: - execution.status = status.value - if execution_time is not None: - execution.execution_time = execution_time - if error: - execution.error_message = error[:EXECUTION_ERROR_LENGTH] - if increment_attempt: - execution.attempts += 1 - - execution.save() - - def has_successful_compilation(self) -> bool: - return self.compilation_result["success"] is True - - def 
get_execution_instance(self) -> WorkflowExecution: - execution: WorkflowExecution = WorkflowExecution.objects.get( - pk=self.execution_id - ) - return execution - - @classmethod - def get_execution_instance_by_id( - cls, execution_id: str - ) -> Optional[WorkflowExecution]: - """Get execution by execution ID. - - Args: - execution_id (str): UID of execution entity - - Returns: - Optional[WorkflowExecution]: WorkflowExecution Entity - """ - try: - execution: WorkflowExecution = WorkflowExecution.objects.get( - pk=execution_id - ) - return execution - except WorkflowExecution.DoesNotExist: - return None - - def build(self) -> None: - if self.compilation_result["success"] is True: - self.build_workflow() - self.update_execution(status=ExecutionStatus.READY) - else: - logger.error( - "Errors while compiling workflow " - f"{self.compilation_result['problems']}" - ) - self.update_execution( - status=ExecutionStatus.ERROR, - error=self.compilation_result["problems"][0], - ) - raise WorkflowExecutionError(self.compilation_result["problems"][0]) - - def execute(self, run_id: str, file_name: str, single_step: bool = False) -> None: - execution_type = ExecutionType.COMPLETE - if single_step: - execution_type = ExecutionType.STEP - - if self.compilation_result["success"] is False: - error_message = ( - f"Errors while compiling workflow " - f"{self.compilation_result['problems'][0]}" - ) - raise WorkflowExecutionError(error_message) - - if self.execution_mode not in ( - WorkflowExecution.Mode.INSTANT, - WorkflowExecution.Mode.QUEUE, - ): - error_message = f"Unknown Execution Method {self.execution_mode}" - raise WorkflowExecutionError(error_message) - - start_time = time.time() - try: - self.execute_workflow( - run_id=run_id, file_name=file_name, execution_type=execution_type - ) - end_time = time.time() - execution_time = end_time - start_time - except StopExecution as exception: - end_time = time.time() - execution_time = end_time - start_time - logger.info(f"Execution {self.execution_id} stopped") - raise exception - except Exception as exception: - end_time = time.time() - execution_time = end_time - start_time - message = str(exception)[:EXECUTION_ERROR_LENGTH] - logger.error( - f"Execution {self.execution_id} ran for {execution_time:.4f}s, " - f" Error {exception}" - ) - raise WorkflowExecutionError(message) from exception - - def publish_initial_workflow_logs(self, total_files: int) -> None: - """Publishes the initial logs for the workflow. - - Args: - total_files (int): The total number of matched files. - - Returns: - None - """ - self.publish_log(f"Total matched files: {total_files}") - self.publish_update_log(LogState.BEGIN_WORKFLOW, "1", LogComponent.STATUS_BAR) - self.publish_update_log( - LogState.RUNNING, "Ready for execution", LogComponent.WORKFLOW - ) - - def publish_final_workflow_logs( - self, total_files: int, successful_files: int, failed_files: int - ) -> None: - """Publishes the final logs for the workflow. - - Returns: - None - """ - self.publish_update_log(LogState.END_WORKFLOW, "1", LogComponent.STATUS_BAR) - self.publish_update_log( - LogState.SUCCESS, "Executed successfully", LogComponent.WORKFLOW - ) - self.publish_log( - f"Total files: {total_files}, " - f"{successful_files} successfully executed and {failed_files} error(s)" - ) - - def publish_initial_tool_execution_logs( - self, current_file_idx: int, total_files: int, file_name: str - ) -> None: - """Publishes the initial logs for tool execution. 
- - Args: - current_file_idx (int): 1-based index for the current file being processed - total_files (int): The total number of files to process - file_name (str): The name of the file being processed. - - Returns: - None - """ - self.publish_update_log( - component=LogComponent.STATUS_BAR, - state=LogState.MESSAGE, - message=f"Processing file {file_name} {current_file_idx}/{total_files}", - ) - self.publish_log(f"Processing file {file_name}") - - def execute_input_file( - self, - run_id: str, - file_name: str, - single_step: bool, - ) -> None: - """Executes the input file. - - Args: - run_id (str): UUID for a single run of a file - file_name (str): The name of the file to be executed. - single_step (bool): Flag indicating whether to execute in - single step mode. - """ - execution_type = ExecutionType.COMPLETE - if single_step: - execution_type = ExecutionType.STEP - self.publish_log( - "No entries found in cache, " f"running the tool(s) for {file_name}" - ) - self.publish_update_log( - state=LogState.SUCCESS, - message=f"{file_name} Sent for execution", - component=LogComponent.SOURCE, - ) - self.execute(run_id, file_name, single_step) - self.publish_log(f"Tool executed successfully for '{file_name}'") - self._handle_execution_type(execution_type) - - def initiate_tool_execution( - self, - current_file_idx: int, - total_files: int, - file_name: str, - single_step: bool, - ) -> None: - """Initiates the execution of a tool for a specific file in the - workflow. - - Args: - current_file_idx (int): 1-based index for the current file being processed - total_step (int): The total number of files to process in the workflow - file_name (str): The name of the file being processed - single_step (bool): Flag indicating whether the execution is in - single-step mode - - Returns: - None - - Raises: - None - """ - execution_type = ExecutionType.COMPLETE - if single_step: - execution_type = ExecutionType.STEP - self.publish_initial_tool_execution_logs( - current_file_idx, total_files, file_name - ) - self._handle_execution_type(execution_type) - - source_status_message = ( - f"({current_file_idx}/{total_files})Processing file {file_name}" - ) - self.publish_update_log( - state=LogState.RUNNING, - message=source_status_message, - component=LogComponent.SOURCE, - ) - self.publish_log("Trying to fetch results from cache") - - @staticmethod - def update_execution_err(execution_id: str, err_msg: str = "") -> WorkflowExecution: - try: - execution = WorkflowExecution.objects.get(pk=execution_id) - execution.status = ExecutionStatus.ERROR.value - execution.error_message = err_msg[:EXECUTION_ERROR_LENGTH] - execution.save() - return execution - except WorkflowExecution.DoesNotExist: - logger.error(f"execution doesn't exist {execution_id}") - - @staticmethod - def update_execution_task(execution_id: str, task_id: str) -> None: - try: - execution = WorkflowExecution.objects.get(pk=execution_id) - execution.task_id = task_id - execution.save() - except WorkflowExecution.DoesNotExist: - logger.error(f"execution doesn't exist {execution_id}") - - @staticmethod - def convert_tool_instance_model_to_data_class( - tool_instance: ToolInstance, - ) -> ToolInstanceDataClass: - tool: Tool = ToolProcessor.get_tool_by_uid(tool_instance.tool_id) - tool_dto = ToolInstanceDataClass( - id=tool_instance.id, - tool_id=tool_instance.tool_id, - workflow=tool_instance.workflow.id, - metadata=tool_instance.metadata, - step=tool_instance.step, - properties=tool.properties, - image_name=tool.image_name, - image_tag=tool.image_tag, - ) - 
return tool_dto - - @staticmethod - def convert_workflow_model_to_data_class( - workflow: Workflow, - ) -> WorkflowDto: - return WorkflowDto(id=workflow.id) diff --git a/backend/workflow_manager/workflow/execution_log_utils.py b/backend/workflow_manager/workflow/execution_log_utils.py deleted file mode 100644 index f2e0420ac..000000000 --- a/backend/workflow_manager/workflow/execution_log_utils.py +++ /dev/null @@ -1,117 +0,0 @@ -import logging -import sys -from collections import defaultdict - -from account.models import Organization -from celery import shared_task -from django.db import IntegrityError -from django.db.utils import ProgrammingError -from django_celery_beat.models import IntervalSchedule, PeriodicTask -from django_tenants.utils import get_tenant_model, tenant_context -from utils.cache_service import CacheService -from utils.constants import ExecutionLogConstants -from utils.dto import LogDataDTO -from workflow_manager.workflow.models.execution_log import ExecutionLog - -logger = logging.getLogger(__name__) - - -@shared_task(bind=True) -def consume_log_history(self): - organization_logs = defaultdict(list) - logs_count = 0 - - while logs_count < ExecutionLogConstants.LOGS_BATCH_LIMIT: - log = CacheService.lpop(ExecutionLogConstants.LOG_QUEUE_NAME) - if not log: - break - - log_data = LogDataDTO.from_json(log) - if not log_data: - continue - - organization_id = log_data.organization_id - organization_logs[organization_id].append( - ExecutionLog( - execution_id=log_data.execution_id, - data=log_data.data, - event_time=log_data.event_time, - ) - ) - logs_count += 1 - logger.info(f"Logs count: {logs_count}") - for organization_id, logs in organization_logs.items(): - store_to_db(organization_id, logs) - - -def create_log_consumer_scheduler_if_not_exists() -> None: - try: - interval, _ = IntervalSchedule.objects.get_or_create( - every=ExecutionLogConstants.CONSUMER_INTERVAL, - period=IntervalSchedule.SECONDS, - ) - except ProgrammingError as error: - logger.warning( - "ProgrammingError occurred while creating " - "log consumer scheduler. 
If you are currently running " - "migrations for new environment, you can ignore this warning" - ) - if "migrate" not in sys.argv: - logger.warning(f"ProgrammingError details: {error}") - return - except IntervalSchedule.MultipleObjectsReturned as error: - logger.error(f"Error occurred while getting interval schedule: {error}") - interval = IntervalSchedule.objects.filter( - every=ExecutionLogConstants.CONSUMER_INTERVAL, - period=IntervalSchedule.SECONDS, - ).first() - try: - # Create the scheduler - task, created = PeriodicTask.objects.get_or_create( - name=ExecutionLogConstants.PERIODIC_TASK_NAME, - task=ExecutionLogConstants.TASK, - defaults={ - "interval": interval, - "queue": ExecutionLogConstants.CELERY_QUEUE_NAME, - "enabled": ExecutionLogConstants.IS_ENABLED, - }, - ) - if not created: - task.enabled = ExecutionLogConstants.IS_ENABLED - task.interval = interval - task.queue = ExecutionLogConstants.CELERY_QUEUE_NAME - task.save() - logger.info("Log consumer scheduler updated successfully.") - else: - logger.info("Log consumer scheduler created successfully.") - except IntegrityError as error: - logger.error(f"Error occurred while creating log consumer scheduler: {error}") - - -def store_to_db(organization_id: str, execution_logs: list[ExecutionLog]) -> None: - try: - tenant: Organization = get_tenant_model().objects.get( - schema_name=organization_id - ) - except Organization.DoesNotExist: - logger.error(f"Organization with ID {organization_id} does not exist.") - return - - # Store the log data in the database within tenant context - with tenant_context(tenant): - ExecutionLog.objects.bulk_create(objs=execution_logs, ignore_conflicts=True) - - -class ExecutionLogUtils: - - @staticmethod - def get_execution_logs_by_execution_id(execution_id) -> list[ExecutionLog]: - """Get all execution logs for a given execution ID. - - Args: - execution_id (int): The ID of the execution. - - Returns: - list[ExecutionLog]: A list of ExecutionLog objects. 
- """ - return ExecutionLog.objects.filter(execution_id=execution_id) diff --git a/backend/workflow_manager/workflow/execution_log_view.py b/backend/workflow_manager/workflow/execution_log_view.py deleted file mode 100644 index 137349e72..000000000 --- a/backend/workflow_manager/workflow/execution_log_view.py +++ /dev/null @@ -1,27 +0,0 @@ -import logging - -from permissions.permission import IsOwner -from rest_framework import viewsets -from rest_framework.versioning import URLPathVersioning -from utils.pagination import CustomPagination -from workflow_manager.workflow.models.execution_log import ExecutionLog -from workflow_manager.workflow.serializers import WorkflowExecutionLogSerializer - -logger = logging.getLogger(__name__) - - -class WorkflowExecutionLogViewSet(viewsets.ModelViewSet): - versioning_class = URLPathVersioning - permission_classes = [IsOwner] - serializer_class = WorkflowExecutionLogSerializer - pagination_class = CustomPagination - - EVENT_TIME_FELID_ASC = "event_time" - - def get_queryset(self): - # Get the execution_id:pk from the URL path - execution_id = self.kwargs.get("pk") - queryset = ExecutionLog.objects.filter(execution_id=execution_id).order_by( - self.EVENT_TIME_FELID_ASC - ) - return queryset diff --git a/backend/workflow_manager/workflow/execution_view.py b/backend/workflow_manager/workflow/execution_view.py deleted file mode 100644 index 7c5866d0c..000000000 --- a/backend/workflow_manager/workflow/execution_view.py +++ /dev/null @@ -1,25 +0,0 @@ -import logging - -from permissions.permission import IsOwner -from rest_framework import viewsets -from rest_framework.versioning import URLPathVersioning -from workflow_manager.workflow.models.execution import WorkflowExecution -from workflow_manager.workflow.serializers import WorkflowExecutionSerializer - -logger = logging.getLogger(__name__) - - -class WorkflowExecutionViewSet(viewsets.ModelViewSet): - versioning_class = URLPathVersioning - permission_classes = [IsOwner] - serializer_class = WorkflowExecutionSerializer - - CREATED_AT_FIELD_DESC = "-created_at" - - def get_queryset(self): - # Get the uuid:pk from the URL path - workflow_id = self.kwargs.get("pk") - queryset = WorkflowExecution.objects.filter(workflow_id=workflow_id).order_by( - self.CREATED_AT_FIELD_DESC - ) - return queryset diff --git a/backend/workflow_manager/workflow/file_history_helper.py b/backend/workflow_manager/workflow/file_history_helper.py deleted file mode 100644 index 8317f106e..000000000 --- a/backend/workflow_manager/workflow/file_history_helper.py +++ /dev/null @@ -1,89 +0,0 @@ -import logging -from typing import Any, Optional - -from django.db.utils import IntegrityError -from workflow_manager.workflow.enums import ExecutionStatus -from workflow_manager.workflow.models.file_history import FileHistory -from workflow_manager.workflow.models.workflow import Workflow - -logger = logging.getLogger(__name__) - - -class FileHistoryHelper: - """A helper class for managing file history related operations.""" - - @staticmethod - def get_file_history( - workflow: Workflow, cache_key: Optional[str] = None - ) -> Optional[FileHistory]: - """Retrieve a file history record based on the cache key. - - Args: - cache_key (Optional[str]): The cache key to search for. - - Returns: - Optional[FileHistory]: The matching file history record, if found. 
- """ - if not cache_key: - return None - try: - file_history: FileHistory = FileHistory.objects.get( - cache_key=cache_key, workflow=workflow - ) - except FileHistory.DoesNotExist: - return None - return file_history - - @staticmethod - def create_file_history( - cache_key: str, - workflow: Workflow, - status: ExecutionStatus, - result: Any, - metadata: Any, - error: Optional[str] = None, - file_name: Optional[str] = None, - ) -> FileHistory: - """Create a new file history record. - - Args: - cache_key (str): The cache key for the file. - workflow (Workflow): The associated workflow. - status (ExecutionStatus): The execution status. - result (Any): The result from the execution. - - Returns: - FileHistory: The newly created file history record. - """ - try: - file_history: FileHistory = FileHistory.objects.create( - workflow=workflow, - cache_key=cache_key, - status=status.value, - result=str(result), - meta_data=str(metadata), - error=str(error) if error else "", - ) - except IntegrityError: - # TODO: Need to find why duplicate insert is coming - logger.warning( - "Trying to insert duplication data for filename %s for workflow %s", - file_name, - workflow, - ) - file_history = FileHistoryHelper.get_file_history( - workflow=workflow, cache_key=cache_key - ) - - return file_history - - @staticmethod - def clear_history_for_workflow( - workflow: Workflow, - ) -> None: - """Clear all file history records associated with a workflow. - - Args: - workflow (Workflow): The workflow to clear the history for. - """ - FileHistory.objects.filter(workflow=workflow).delete() diff --git a/backend/workflow_manager/workflow/migrations/0001_initial.py b/backend/workflow_manager/workflow/migrations/0001_initial.py deleted file mode 100644 index 50483fe13..000000000 --- a/backend/workflow_manager/workflow/migrations/0001_initial.py +++ /dev/null @@ -1,285 +0,0 @@ -# Generated by Django 4.2.1 on 2024-01-23 11:18 - -import uuid - -import django.db.models.deletion -from django.conf import settings -from django.db import migrations, models - - -class Migration(migrations.Migration): - initial = True - - dependencies = [ - migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ("project", "0001_initial"), - ] - - operations = [ - migrations.CreateModel( - name="WorkflowExecution", - fields=[ - ("created_at", models.DateTimeField(auto_now_add=True)), - ("modified_at", models.DateTimeField(auto_now=True)), - ( - "id", - models.UUIDField( - default=uuid.uuid4, - editable=False, - primary_key=True, - serialize=False, - ), - ), - ( - "pipeline_id", - models.UUIDField( - db_comment="ID of the associated pipeline, if applicable", - editable=False, - null=True, - ), - ), - ( - "task_id", - models.UUIDField( - db_comment="task id of asynchronous execution", - editable=False, - null=True, - ), - ), - ( - "workflow_id", - models.UUIDField( - db_comment="Id of workflow to be executed", - editable=False, - ), - ), - ( - "project_settings_id", - models.UUIDField( - db_comment="Id of project settings used while execution", - editable=False, - ), - ), - ( - "execution_mode", - models.CharField( - choices=[ - ("INSTANT", "will be executed immediately"), - ("QUEUE", "will be placed in a queue"), - ], - db_comment="Mode of execution", - ), - ), - ( - "execution_method", - models.CharField( - choices=[ - ("DIRECT", " Execution triggered manually"), - ("SCHEDULED", "Scheduled execution"), - ], - db_comment="Method of execution", - ), - ), - ( - "execution_type", - models.CharField( - choices=[ - ("COMPLETE", "For complete 
execution"), - ("STEP", "For step-by-step execution "), - ], - db_comment="Type of execution", - ), - ), - ( - "status", - models.CharField( - db_comment="Current status of execution", default="" - ), - ), - ( - "error_message", - models.CharField( - blank=True, - db_comment="Details of encountered errors", - default="", - max_length=256, - ), - ), - ( - "attempts", - models.IntegerField( - db_comment="number of attempts taken", default=0 - ), - ), - ( - "execution_time", - models.FloatField( - db_comment="execution time in seconds", default=0 - ), - ), - ], - options={ - "abstract": False, - }, - ), - migrations.CreateModel( - name="Workflow", - fields=[ - ("created_at", models.DateTimeField(auto_now_add=True)), - ("modified_at", models.DateTimeField(auto_now=True)), - ( - "id", - models.UUIDField( - default=uuid.uuid4, - editable=False, - primary_key=True, - serialize=False, - ), - ), - ("prompt_name", models.CharField(default="", max_length=32)), - ("description", models.TextField(default="", max_length=490)), - ( - "workflow_name", - models.CharField(max_length=128, unique=True), - ), - ( - "settings", - models.JSONField(db_comment="Workflow settings", null=True), - ), - ("prompt_text", models.TextField(default="")), - ("is_active", models.BooleanField(default=False)), - ("status", models.CharField(default="", max_length=16)), - ("llm_response", models.TextField()), - ( - "deployment_type", - models.CharField( - choices=[ - ("DEFAULT", "Not ready yet"), - ("ETL", "ETL pipeline"), - ("TASK", "TASK pipeline"), - ("API", "API deployment"), - ("APP", "App deployment"), - ], - db_comment="Type of workflow deployment", - default="DEFAULT", - ), - ), - ( - "source_settings", - models.JSONField( - db_comment="Settings for the Source module", null=True - ), - ), - ( - "destination_settings", - models.JSONField( - db_comment="Settings for the Destination module", - null=True, - ), - ), - ( - "created_by", - models.ForeignKey( - blank=True, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - related_name="created_workflow", - to=settings.AUTH_USER_MODEL, - ), - ), - ( - "modified_by", - models.ForeignKey( - blank=True, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - related_name="modified_workflow", - to=settings.AUTH_USER_MODEL, - ), - ), - ( - "project", - models.ForeignKey( - blank=True, - null=True, - on_delete=django.db.models.deletion.CASCADE, - related_name="project_workflow", - to="project.project", - ), - ), - ( - "workflow_owner", - models.ForeignKey( - blank=True, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - related_name="workflow_owner", - to=settings.AUTH_USER_MODEL, - ), - ), - ], - options={ - "abstract": False, - }, - ), - migrations.CreateModel( - name="FileHistory", - fields=[ - ("created_at", models.DateTimeField(auto_now_add=True)), - ("modified_at", models.DateTimeField(auto_now=True)), - ( - "id", - models.UUIDField( - default=uuid.uuid4, - editable=False, - primary_key=True, - serialize=False, - ), - ), - ( - "cache_key", - models.CharField( - db_comment="Hash value of file contents, WF and tool modified times", - max_length=64, - ), - ), - ( - "status", - models.TextField( - choices=[ - ("PENDING", "PENDING"), - ("INITIATED", "INITIATED"), - ("QUEUED", "QUEUED"), - ("READY", "READY"), - ("EXECUTING", "EXECUTING"), - ("COMPLETED", "COMPLETED"), - ("STOPPED", "STOPPED"), - ("ERROR", "ERROR"), - ], - db_comment="Latest status of execution", - ), - ), - ( - "error", - models.TextField( - blank=True, db_comment="Error message", 
default="" - ), - ), - ( - "result", - models.TextField(blank=True, db_comment="Result from execution"), - ), - ( - "workflow", - models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, - related_name="filehistory_workflow", - to="workflow.workflow", - ), - ), - ], - options={ - "abstract": False, - }, - ), - ] diff --git a/backend/workflow_manager/workflow/migrations/0002_remove_workflow_settings.py b/backend/workflow_manager/workflow/migrations/0002_remove_workflow_settings.py deleted file mode 100644 index b42e3efa1..000000000 --- a/backend/workflow_manager/workflow/migrations/0002_remove_workflow_settings.py +++ /dev/null @@ -1,16 +0,0 @@ -# Generated by Django 4.2.1 on 2024-02-28 11:29 - -from django.db import migrations - - -class Migration(migrations.Migration): - dependencies = [ - ("workflow", "0001_initial"), - ] - - operations = [ - migrations.RemoveField( - model_name="workflow", - name="settings", - ), - ] diff --git a/backend/workflow_manager/workflow/migrations/0003_workflowexecution_execution_log_id_and_more.py b/backend/workflow_manager/workflow/migrations/0003_workflowexecution_execution_log_id_and_more.py deleted file mode 100644 index 6f554f6b7..000000000 --- a/backend/workflow_manager/workflow/migrations/0003_workflowexecution_execution_log_id_and_more.py +++ /dev/null @@ -1,30 +0,0 @@ -# Generated by Django 4.2.1 on 2024-03-17 17:20 - -import uuid - -from django.db import migrations, models - - -class Migration(migrations.Migration): - dependencies = [ - ("workflow", "0002_remove_workflow_settings"), - ] - - operations = [ - migrations.AddField( - model_name="workflowexecution", - name="execution_log_id", - field=models.CharField( - db_comment="Execution log events Id", default="", editable=False - ), - ), - migrations.AlterField( - model_name="workflowexecution", - name="project_settings_id", - field=models.UUIDField( - db_comment="Id of project settings used while execution", - default=uuid.uuid4, - editable=False, - ), - ), - ] diff --git a/backend/workflow_manager/workflow/migrations/0004_filehistory_workflow_cachekey.py b/backend/workflow_manager/workflow/migrations/0004_filehistory_workflow_cachekey.py deleted file mode 100644 index e3a7ac762..000000000 --- a/backend/workflow_manager/workflow/migrations/0004_filehistory_workflow_cachekey.py +++ /dev/null @@ -1,56 +0,0 @@ -# Generated by Django 4.2.1 on 2024-05-14 07:03 -from typing import Any - -from django.db import migrations, models -from django.db.models import Count - - -class Migration(migrations.Migration): - - dependencies = [ - ("workflow", "0003_workflowexecution_execution_log_id_and_more"), - ] - - def UniqueConstraintDataMigration(apps: Any, schema_editor: Any) -> None: - """Migrate data to enforce unique constraint on the 'FileHistory' model - based on the fields 'workflow' and 'cache_key'. - - For each duplicate combination of 'workflow' and 'cache_key', - keep the entry with the latest 'created_at' timestamp and delete the rest. - Parameters: - apps (Any): The state of the historical models at the point in time - when the migration is applied. - schema_editor (Any): The backend-specific schema editor class for - the database. 
- Returns: - None - """ - FileHistory = apps.get_model("workflow", "FileHistory") - - duplicates = ( - FileHistory.objects.values("workflow", "cache_key") - .annotate(count=Count("id")) - .filter(count__gt=1) - ) - - for duplicate in duplicates: - entries = FileHistory.objects.filter( - workflow=duplicate["workflow"], - cache_key=duplicate["cache_key"], - ).order_by("-created_at") - entries_to_keep = entries.first() - entries_to_delete = entries.exclude(id=entries_to_keep.id) - entries_to_delete.delete() - - operations = [ - migrations.RunPython( - UniqueConstraintDataMigration, - reverse_code=migrations.RunPython.noop, - ), - migrations.AddConstraint( - model_name="filehistory", - constraint=models.UniqueConstraint( - fields=("workflow", "cache_key"), name="workflow_cacheKey" - ), - ), - ] diff --git a/backend/workflow_manager/workflow/migrations/0005_executionlog.py b/backend/workflow_manager/workflow/migrations/0005_executionlog.py deleted file mode 100644 index cdd1249b8..000000000 --- a/backend/workflow_manager/workflow/migrations/0005_executionlog.py +++ /dev/null @@ -1,43 +0,0 @@ -# Generated by Django 4.2.1 on 2024-05-23 08:50 - -import uuid - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("workflow", "0004_filehistory_workflow_cachekey"), - ] - - operations = [ - migrations.CreateModel( - name="ExecutionLog", - fields=[ - ("created_at", models.DateTimeField(auto_now_add=True)), - ("modified_at", models.DateTimeField(auto_now=True)), - ( - "id", - models.UUIDField( - default=uuid.uuid4, - editable=False, - primary_key=True, - serialize=False, - ), - ), - ( - "execution_id", - models.UUIDField(db_comment="Execution ID", editable=False), - ), - ("data", models.JSONField(db_comment="Execution log data")), - ( - "event_time", - models.DateTimeField(db_comment="Execution log event time"), - ), - ], - options={ - "abstract": False, - }, - ), - ] diff --git a/backend/workflow_manager/workflow/migrations/0006_filehistory_meta_data.py b/backend/workflow_manager/workflow/migrations/0006_filehistory_meta_data.py deleted file mode 100644 index 46b43dc79..000000000 --- a/backend/workflow_manager/workflow/migrations/0006_filehistory_meta_data.py +++ /dev/null @@ -1,18 +0,0 @@ -# Generated by Django 4.2.1 on 2024-06-21 18:00 - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("workflow", "0005_executionlog"), - ] - - operations = [ - migrations.AddField( - model_name="filehistory", - name="meta_data", - field=models.TextField(blank=True, db_comment="MetaData from execution"), - ), - ] diff --git a/backend/workflow_manager/workflow/migrations/__init__.py b/backend/workflow_manager/workflow/migrations/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/backend/workflow_manager/workflow/models/__init__.py b/backend/workflow_manager/workflow/models/__init__.py deleted file mode 100644 index 938907762..000000000 --- a/backend/workflow_manager/workflow/models/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -from .execution import WorkflowExecution # noqa: F401 -from .execution_log import ExecutionLog # noqa: F401 -from .file_history import FileHistory # noqa: F401 -from .workflow import Workflow # noqa: F401 diff --git a/backend/workflow_manager/workflow/models/execution.py b/backend/workflow_manager/workflow/models/execution.py deleted file mode 100644 index 474c19244..000000000 --- a/backend/workflow_manager/workflow/models/execution.py +++ /dev/null @@ -1,76 
+0,0 @@ -import uuid - -from django.db import models -from utils.models.base_model import BaseModel - -EXECUTION_ERROR_LENGTH = 256 - - -class WorkflowExecution(BaseModel): - class Mode(models.TextChoices): - INSTANT = "INSTANT", "will be executed immediately" - QUEUE = "QUEUE", "will be placed in a queue" - - class Method(models.TextChoices): - DIRECT = "DIRECT", " Execution triggered manually" - SCHEDULED = "SCHEDULED", "Scheduled execution" - - class Type(models.TextChoices): - COMPLETE = "COMPLETE", "For complete execution" - STEP = "STEP", "For step-by-step execution " - - id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False) - # TODO: Make as foreign key to access the instance directly - pipeline_id = models.UUIDField( - editable=False, - null=True, - db_comment="ID of the associated pipeline, if applicable", - ) - task_id = models.UUIDField( - editable=False, - null=True, - db_comment="task id of asynchronous execution", - ) - # We can remove workflow_id if it not required - workflow_id = models.UUIDField( - editable=False, db_comment="Id of workflow to be executed" - ) - project_settings_id = models.UUIDField( - editable=False, - default=uuid.uuid4, - db_comment="Id of project settings used while execution", - ) - execution_mode = models.CharField( - choices=Mode.choices, db_comment="Mode of execution" - ) - execution_method = models.CharField( - choices=Method.choices, db_comment="Method of execution" - ) - execution_type = models.CharField( - choices=Type.choices, db_comment="Type of execution" - ) - execution_log_id = models.CharField( - default="", editable=False, db_comment="Execution log events Id" - ) - # TODO: Restrict with an enum - status = models.CharField(default="", db_comment="Current status of execution") - error_message = models.CharField( - max_length=EXECUTION_ERROR_LENGTH, - blank=True, - default="", - db_comment="Details of encountered errors", - ) - attempts = models.IntegerField(default=0, db_comment="number of attempts taken") - execution_time = models.FloatField( - default=0, db_comment="execution time in seconds" - ) - - def __str__(self) -> str: - return ( - f"Workflow execution: {self.id} (" - f"pipeline ID: {self.pipeline_id}, " - f"workflow iD: {self.workflow_id}, " - f"execution method: {self.execution_method}, " - f"status: {self.status}, " - f"error message: {self.error_message})" - ) diff --git a/backend/workflow_manager/workflow/models/execution_log.py b/backend/workflow_manager/workflow/models/execution_log.py deleted file mode 100644 index d3b2c5ae7..000000000 --- a/backend/workflow_manager/workflow/models/execution_log.py +++ /dev/null @@ -1,17 +0,0 @@ -import uuid - -from django.db import models -from utils.models.base_model import BaseModel - - -class ExecutionLog(BaseModel): - id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False) - execution_id = models.UUIDField( - editable=False, - db_comment="Execution ID", - ) - data = models.JSONField(db_comment="Execution log data") - event_time = models.DateTimeField(db_comment="Execution log event time") - - def __str__(self): - return f"Execution ID: {self.execution_id}, Message: {self.data}" diff --git a/backend/workflow_manager/workflow/models/file_history.py b/backend/workflow_manager/workflow/models/file_history.py deleted file mode 100644 index a31064803..000000000 --- a/backend/workflow_manager/workflow/models/file_history.py +++ /dev/null @@ -1,50 +0,0 @@ -import uuid - -from django.db import models -from utils.models.base_model import BaseModel 
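
Note (not part of the diff): the literal choices lists that appear in the deleted 0001_initial migration are simply the expanded form of the TextChoices declared on the removed WorkflowExecution model above. A minimal standalone sketch illustrating that expansion follows; the bare settings.configure() call is only scaffolding so it runs outside a Django project, and the class here is a throwaway copy of the Mode enum, not project code.

import django
from django.conf import settings

if not settings.configured:
    settings.configure()  # empty settings are enough for a standalone enum
    django.setup()

from django.db import models


class Mode(models.TextChoices):
    INSTANT = "INSTANT", "will be executed immediately"
    QUEUE = "QUEUE", "will be placed in a queue"


# Members compare equal to their stored value, and .choices yields the
# (value, label) pairs that Django writes verbatim into the migration.
assert Mode.INSTANT == "INSTANT"
assert Mode.choices == [
    ("INSTANT", "will be executed immediately"),
    ("QUEUE", "will be placed in a queue"),
]
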
-from workflow_manager.workflow.enums import ExecutionStatus -from workflow_manager.workflow.models.workflow import Workflow - -HASH_LENGTH = 64 - - -class FileHistory(BaseModel): - def is_completed(self) -> bool: - """Check if the execution status is completed. - - Returns: - bool: True if the execution status is completed, False otherwise. - """ - return ( - self.status is not None and self.status == ExecutionStatus.COMPLETED.value - ) - - id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False) - cache_key = models.CharField( - max_length=HASH_LENGTH, - db_comment="Hash value of file contents, WF and tool modified times", - ) - workflow = models.ForeignKey( - Workflow, - on_delete=models.CASCADE, - related_name="filehistory_workflow", - ) - status = models.TextField( - choices=ExecutionStatus.choices(), - db_comment="Latest status of execution", - ) - error = models.TextField( - blank=True, - default="", - db_comment="Error message", - ) - result = models.TextField(blank=True, db_comment="Result from execution") - meta_data = models.TextField(blank=True, db_comment="MetaData from execution") - - class Meta: - constraints = [ - models.UniqueConstraint( - fields=["workflow", "cache_key"], - name="workflow_cacheKey", - ), - ] diff --git a/backend/workflow_manager/workflow/models/workflow.py b/backend/workflow_manager/workflow/models/workflow.py deleted file mode 100644 index 7954b9559..000000000 --- a/backend/workflow_manager/workflow/models/workflow.py +++ /dev/null @@ -1,80 +0,0 @@ -import uuid - -from account.models import User -from django.db import models -from project.models import Project -from utils.models.base_model import BaseModel - -PROMPT_NAME_LENGTH = 32 -WORKFLOW_STATUS_LENGTH = 16 -DESCRIPTION_FIELD_LENGTH = 490 -WORKFLOW_NAME_SIZE = 128 - - -class Workflow(BaseModel): - class WorkflowType(models.TextChoices): - DEFAULT = "DEFAULT", "Not ready yet" - ETL = "ETL", "ETL pipeline" - TASK = "TASK", "TASK pipeline" - API = "API", "API deployment" - APP = "APP", "App deployment" - - class ExecutionAction(models.TextChoices): - START = "START", "Start the Execution" - NEXT = "NEXT", "Execute next tool" - STOP = "STOP", "Stop the execution" - CONTINUE = "CONTINUE", "Continue to full execution" - - # TODO Make this guid as primaryId instaed of current id bigint - id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False) - project = models.ForeignKey( - Project, - on_delete=models.CASCADE, - related_name="project_workflow", - null=True, - blank=True, - ) - # TODO: Move prompt fields as a One-One relationship/into Prompt instead - prompt_name = models.CharField(max_length=PROMPT_NAME_LENGTH, default="") - description = models.TextField(max_length=DESCRIPTION_FIELD_LENGTH, default="") - workflow_name = models.CharField(max_length=WORKFLOW_NAME_SIZE, unique=True) - prompt_text = models.TextField(default="") - is_active = models.BooleanField(default=False) - status = models.CharField(max_length=WORKFLOW_STATUS_LENGTH, default="") - llm_response = models.TextField() - workflow_owner = models.ForeignKey( - User, - on_delete=models.SET_NULL, - related_name="workflow_owner", - null=True, - blank=True, - ) - deployment_type = models.CharField( - choices=WorkflowType.choices, - db_comment="Type of workflow deployment", - default=WorkflowType.DEFAULT, - ) - source_settings = models.JSONField( - null=True, db_comment="Settings for the Source module" - ) - destination_settings = models.JSONField( - null=True, db_comment="Settings for the Destination module" - 
) - - created_by = models.ForeignKey( - User, - on_delete=models.SET_NULL, - related_name="created_workflow", - null=True, - blank=True, - ) - modified_by = models.ForeignKey( - User, - on_delete=models.SET_NULL, - related_name="modified_workflow", - null=True, - blank=True, - ) - - def __str__(self) -> str: - return f"{self.id}, name: {self.workflow_name}" diff --git a/backend/workflow_manager/workflow/serializers.py b/backend/workflow_manager/workflow/serializers.py deleted file mode 100644 index 51629c193..000000000 --- a/backend/workflow_manager/workflow/serializers.py +++ /dev/null @@ -1,136 +0,0 @@ -import logging -from typing import Any, Optional, Union - -from project.constants import ProjectKey -from rest_framework.serializers import ( - CharField, - ChoiceField, - JSONField, - ModelSerializer, - Serializer, - UUIDField, - ValidationError, -) -from tool_instance.serializers import ToolInstanceSerializer -from tool_instance.tool_instance_helper import ToolInstanceHelper -from workflow_manager.endpoint.models import WorkflowEndpoint -from workflow_manager.workflow.constants import WorkflowExecutionKey, WorkflowKey -from workflow_manager.workflow.models.execution import WorkflowExecution -from workflow_manager.workflow.models.execution_log import ExecutionLog -from workflow_manager.workflow.models.workflow import Workflow - -from backend.constants import RequestKey -from backend.serializers import AuditSerializer - -logger = logging.getLogger(__name__) - - -class WorkflowSerializer(AuditSerializer): - tool_instances = ToolInstanceSerializer(many=True, read_only=True) - - class Meta: - model = Workflow - fields = "__all__" - extra_kwargs = { - WorkflowKey.LLM_RESPONSE: { - "required": False, - }, - } - - def to_representation(self, instance: Workflow) -> dict[str, str]: - representation: dict[str, str] = super().to_representation(instance) - representation[WorkflowKey.WF_NAME] = instance.workflow_name - representation[WorkflowKey.WF_TOOL_INSTANCES] = ToolInstanceSerializer( - ToolInstanceHelper.get_tool_instances_by_workflow( - workflow_id=instance.id, order_by="step" - ), - many=True, - context=self.context, - ).data - representation["created_by_email"] = instance.created_by.email - return representation - - def create(self, validated_data: dict[str, Any]) -> Any: - if self.context.get(RequestKey.REQUEST): - validated_data[WorkflowKey.WF_OWNER] = self.context.get( - RequestKey.REQUEST - ).user - return super().create(validated_data) - - -class ExecuteWorkflowSerializer(Serializer): - workflow_id = UUIDField(required=False) - project_id = UUIDField(required=False) - execution_action = ChoiceField( - choices=Workflow.ExecutionAction.choices, required=False - ) - execution_id = UUIDField(required=False) - log_guid = UUIDField(required=False) - # TODO: Add other fields to handle WFExecution method, mode .etc. 
- - def get_workflow_id( - self, validated_data: dict[str, Union[str, None]] - ) -> Optional[str]: - return validated_data.get(WorkflowKey.WF_ID) - - def get_project_id( - self, validated_data: dict[str, Union[str, None]] - ) -> Optional[str]: - return validated_data.get(ProjectKey.PROJECT_ID) - - def get_execution_id( - self, validated_data: dict[str, Union[str, None]] - ) -> Optional[str]: - return validated_data.get(WorkflowExecutionKey.EXECUTION_ID) - - def get_log_guid( - self, validated_data: dict[str, Union[str, None]] - ) -> Optional[str]: - return validated_data.get(WorkflowExecutionKey.LOG_GUID) - - def get_execution_action( - self, validated_data: dict[str, Union[str, None]] - ) -> Optional[str]: - return validated_data.get(WorkflowKey.EXECUTION_ACTION) - - def validate( - self, data: dict[str, Union[str, None]] - ) -> dict[str, Union[str, None]]: - workflow_id = data.get(WorkflowKey.WF_ID) - project_id = data.get(ProjectKey.PROJECT_ID) - - if not workflow_id and not project_id: - raise ValidationError( - "At least one of 'workflow_id' or 'project_id' is required." - ) - - return data - - -class ExecuteWorkflowResponseSerializer(Serializer): - workflow_id = UUIDField() - execution_id = UUIDField() - execution_status = CharField() - log_id = CharField() - error = CharField() - result = JSONField() - - -class WorkflowEndpointSerializer(ModelSerializer): - workflow_name = CharField(source="workflow.workflow_name", read_only=True) - - class Meta: - model = WorkflowEndpoint - fields = "__all__" - - -class WorkflowExecutionSerializer(ModelSerializer): - class Meta: - model = WorkflowExecution - fields = "__all__" - - -class WorkflowExecutionLogSerializer(ModelSerializer): - class Meta: - model = ExecutionLog - fields = "__all__" diff --git a/backend/workflow_manager/workflow/tests.py b/backend/workflow_manager/workflow/tests.py deleted file mode 100644 index a39b155ac..000000000 --- a/backend/workflow_manager/workflow/tests.py +++ /dev/null @@ -1 +0,0 @@ -# Create your tests here. 
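
Note (not part of the diff): the removed ExecuteWorkflowSerializer enforced that at least one of workflow_id or project_id is supplied, raising the error message shown above otherwise. A self-contained sketch of that rule follows for reference; the class name and the settings.configure() scaffolding are assumptions made only so the snippet runs outside a Django project, and the v2 serializer is presumed to keep an equivalent contract.

import django
from django.conf import settings

if not settings.configured:
    settings.configure()  # minimal settings so DRF can be used standalone
    django.setup()

from rest_framework import serializers


class ExecuteWorkflowInput(serializers.Serializer):  # hypothetical stand-in name
    workflow_id = serializers.UUIDField(required=False)
    project_id = serializers.UUIDField(required=False)

    def validate(self, data):
        # Mirror of the removed validate(): both fields are optional
        # individually, but at least one must be present.
        if not data.get("workflow_id") and not data.get("project_id"):
            raise serializers.ValidationError(
                "At least one of 'workflow_id' or 'project_id' is required."
            )
        return data


# Neither field supplied -> validation fails.
assert not ExecuteWorkflowInput(data={}).is_valid()
# Supplying either one is sufficient.
assert ExecuteWorkflowInput(
    data={"workflow_id": "6f1c2b1e-9a1d-4f5e-8f3a-2b7c9d0e1a23"}
).is_valid()
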
diff --git a/backend/workflow_manager/workflow/urls.py b/backend/workflow_manager/workflow/urls.py deleted file mode 100644 index d75b5fc43..000000000 --- a/backend/workflow_manager/workflow/urls.py +++ /dev/null @@ -1,77 +0,0 @@ -from django.urls import path -from rest_framework.urlpatterns import format_suffix_patterns -from workflow_manager.workflow.execution_log_view import WorkflowExecutionLogViewSet -from workflow_manager.workflow.execution_view import WorkflowExecutionViewSet -from workflow_manager.workflow.views import WorkflowViewSet - -workflow_list = WorkflowViewSet.as_view( - { - "get": "list", - "post": "create", - } -) -workflow_detail = WorkflowViewSet.as_view( - # fmt: off - { - 'get': 'retrieve', - 'put': 'update', - 'patch': 'partial_update', - 'delete': 'destroy' - } - # fmt: on -) -workflow_execute = WorkflowViewSet.as_view({"post": "execute", "put": "activate"}) -execution_entity = WorkflowExecutionViewSet.as_view({"get": "retrieve"}) -execution_list = WorkflowExecutionViewSet.as_view({"get": "list"}) -execution_log_list = WorkflowExecutionLogViewSet.as_view({"get": "list"}) -workflow_clear_cache = WorkflowViewSet.as_view({"get": "clear_cache"}) -workflow_clear_file_marker = WorkflowViewSet.as_view({"get": "clear_file_marker"}) -workflow_schema = WorkflowViewSet.as_view({"get": "get_schema"}) -can_update = WorkflowViewSet.as_view({"get": "can_update"}) -urlpatterns = format_suffix_patterns( - [ - path("", workflow_list, name="workflow-list"), - path("/", workflow_detail, name="workflow-detail"), - path( - "/clear-cache/", - workflow_clear_cache, - name="clear-cache", - ), - path( - "/clear-file-marker/", - workflow_clear_file_marker, - name="clear-file-marker", - ), - path( - "/can-update/", - can_update, - name="can-update", - ), - path("execute/", workflow_execute, name="execute-workflow"), - path( - "active//", - workflow_execute, - name="active-workflow", - ), - path( - "/execution/", - execution_list, - name="execution-list", - ), - path( - "execution//", - execution_entity, - name="workflow-detail", - ), - path( - "execution//logs/", - execution_log_list, - name="execution-log", - ), - path( - "schema/", - workflow_schema, - name="workflow-schema", - ), - ] -) diff --git a/backend/workflow_manager/workflow/utils.py b/backend/workflow_manager/workflow/utils.py deleted file mode 100644 index d89dfa6a2..000000000 --- a/backend/workflow_manager/workflow/utils.py +++ /dev/null @@ -1,61 +0,0 @@ -from typing import Any - -from workflow_manager.endpoint.dto import FileHash -from workflow_manager.workflow.models.workflow import Workflow - - -class WorkflowUtil: - """A utility class for managing workflow operations, particularly for - selecting files for manual review and updating file destination based on - review criteria.""" - - @staticmethod - def _mrq_files( - percentage: float, - n: int, - ) -> Any: - """Placeholder method for selecting a subset of files based on a given - percentage. - - Args: - percentage (float): The percentage of files to select. - n (int): The total number of files. - - Returns: - Any: The method is currently a placeholder and does not return a value. - """ - pass - - @classmethod - def get_q_no_list(cls, workflow: Workflow, total_files: int) -> Any: - """Placeholder method for retrieving a list of files to be reviewed - based on workflow rules. - - Args: - workflow (Workflow): The workflow instance to be processed. - total_files (int): The total number of files in the workflow. 
- - Returns: - Any: The method is currently a placeholder and does not return a value. - """ - pass - - @staticmethod - def add_file_destination_filehash( - index: int, - q_file_no_list: Any, - file_hash: FileHash, - ) -> FileHash: - """Updates the file destination in the FileHash object if the file - index is marked for manual review. - - Args: - index (int): The index of the file being processed. - q_file_no_list (Any): A list or set of file indices marked for review. - file_hash (FileHash): The FileHash object to be updated. - - Returns: - FileHash: The potentially updated FileHash object with the file - destination modified. - """ - return file_hash diff --git a/backend/workflow_manager/workflow/views.py b/backend/workflow_manager/workflow/views.py deleted file mode 100644 index f8f2d4cd8..000000000 --- a/backend/workflow_manager/workflow/views.py +++ /dev/null @@ -1,293 +0,0 @@ -import logging -from typing import Any, Optional - -from connector.connector_instance_helper import ConnectorInstanceHelper -from django.conf import settings -from django.db.models.query import QuerySet -from permissions.permission import IsOwner -from pipeline.models import Pipeline -from pipeline.pipeline_processor import PipelineProcessor -from rest_framework import serializers, status, viewsets -from rest_framework.decorators import action -from rest_framework.request import Request -from rest_framework.response import Response -from rest_framework.versioning import URLPathVersioning -from utils.filtering import FilterHelper -from workflow_manager.endpoint.destination import DestinationConnector -from workflow_manager.endpoint.dto import FileHash -from workflow_manager.endpoint.endpoint_utils import WorkflowEndpointUtils -from workflow_manager.endpoint.source import SourceConnector -from workflow_manager.workflow.constants import WorkflowKey -from workflow_manager.workflow.dto import ExecutionResponse -from workflow_manager.workflow.enums import SchemaEntity, SchemaType -from workflow_manager.workflow.exceptions import ( - InternalException, - WorkflowDoesNotExistError, - WorkflowGenerationError, - WorkflowRegenerationError, -) -from workflow_manager.workflow.models.execution import WorkflowExecution -from workflow_manager.workflow.models.workflow import Workflow -from workflow_manager.workflow.serializers import ( - ExecuteWorkflowResponseSerializer, - ExecuteWorkflowSerializer, - WorkflowSerializer, -) -from workflow_manager.workflow.workflow_helper import ( - WorkflowHelper, - WorkflowSchemaHelper, -) - -from backend.constants import RequestKey - -logger = logging.getLogger(__name__) - - -def make_execution_response(response: ExecutionResponse) -> Any: - return ExecuteWorkflowResponseSerializer(response).data - - -class WorkflowViewSet(viewsets.ModelViewSet): - versioning_class = URLPathVersioning - permission_classes = [IsOwner] - queryset = Workflow.objects.all() - - def get_queryset(self) -> QuerySet: - filter_args = FilterHelper.build_filter_args( - self.request, - RequestKey.PROJECT, - WorkflowKey.WF_OWNER, - WorkflowKey.WF_IS_ACTIVE, - ) - queryset = ( - Workflow.objects.filter(created_by=self.request.user, **filter_args) - if filter_args - else Workflow.objects.filter(created_by=self.request.user) - ) - order_by = self.request.query_params.get("order_by") - if order_by == "desc": - queryset = queryset.order_by("-modified_at") - elif order_by == "asc": - queryset = queryset.order_by("modified_at") - - return queryset - - def get_serializer_class(self) -> serializers.Serializer: - if self.action == 
"execute": - return ExecuteWorkflowSerializer - else: - return WorkflowSerializer - - def perform_update(self, serializer: WorkflowSerializer) -> Workflow: - """To edit a workflow. - - Raises: WorkflowGenerationError - """ - kwargs = {} - - try: - workflow = serializer.save(**kwargs) - return workflow - except Exception as e: - logger.error(f"Error saving workflow to DB: {e}") - raise WorkflowRegenerationError - - def perform_create(self, serializer: WorkflowSerializer) -> Workflow: - """To create a new workflow. Creates the Workflow instance first and - uses it to generate the tool instances. - - Raises: WorkflowGenerationError - """ - try: - workflow = serializer.save( - is_active=True, - ) - WorkflowEndpointUtils.create_endpoints_for_workflow(workflow) - - # Enable GCS configurations to create GCS while creating a workflow - if ( - settings.GOOGLE_STORAGE_ACCESS_KEY_ID - and settings.UNSTRACT_FREE_STORAGE_BUCKET_NAME - ): - ConnectorInstanceHelper.create_default_gcs_connector( - workflow, self.request.user - ) - - except Exception as e: - logger.error(f"Error saving workflow to DB: {e}") - raise WorkflowGenerationError - return workflow - - def get_execution(self, request: Request, pk: str) -> Response: - execution = WorkflowHelper.get_current_execution(pk) - return Response(make_execution_response(execution), status=status.HTTP_200_OK) - - def get_workflow_by_id_or_project_id( - self, - workflow_id: Optional[str] = None, - project_id: Optional[str] = None, - ) -> Workflow: - """Retrieve workflow by workflow id or project Id. - - Args: - workflow_id (Optional[str], optional): workflow Id. - project_id (Optional[str], optional): Project Id. - - Raises: - WorkflowDoesNotExistError: _description_ - - Returns: - Workflow: workflow - """ - if workflow_id: - workflow = WorkflowHelper.get_workflow_by_id(workflow_id) - elif project_id: - workflow = WorkflowHelper.get_active_workflow_by_project_id(project_id) - else: - raise WorkflowDoesNotExistError() - return workflow - - def execute( - self, - request: Request, - pipeline_guid: Optional[str] = None, - ) -> Response: - self.serializer_class = ExecuteWorkflowSerializer - serializer = ExecuteWorkflowSerializer(data=request.data) - serializer.is_valid(raise_exception=True) - workflow_id = serializer.get_workflow_id(serializer.validated_data) - project_id = serializer.get_project_id(serializer.validated_data) - execution_id = serializer.get_execution_id(serializer.validated_data) - execution_action = serializer.get_execution_action(serializer.validated_data) - file_objs = request.FILES.getlist("files") - hashes_of_files: dict[str, FileHash] = {} - use_file_history: bool = True - - # API based execution - if file_objs and execution_id and workflow_id: - use_file_history = False - hashes_of_files = SourceConnector.add_input_file_to_api_storage( - workflow_id=workflow_id, - execution_id=execution_id, - file_objs=file_objs, - use_file_history=False, - ) - - try: - workflow = self.get_workflow_by_id_or_project_id( - workflow_id=workflow_id, project_id=project_id - ) - execution_response = self.execute_workflow( - workflow=workflow, - execution_action=execution_action, - execution_id=execution_id, - pipeline_guid=pipeline_guid, - hash_values_of_files=hashes_of_files, - use_file_history=use_file_history, - ) - if ( - execution_response.execution_status == "ERROR" - and execution_response.result - and execution_response.result[0].get("error") - ): - raise InternalException(execution_response.result[0].get("error")) - return Response( - 
make_execution_response(execution_response), - status=status.HTTP_200_OK, - ) - except Exception as exception: - logger.error(f"Error while executing workflow: {exception}") - if file_objs and execution_id and workflow_id: - DestinationConnector.delete_api_storage_dir( - workflow_id=workflow_id, execution_id=execution_id - ) - raise exception - - def execute_workflow( - self, - workflow: Workflow, - execution_action: Optional[str] = None, - execution_id: Optional[str] = None, - pipeline_guid: Optional[str] = None, - hash_values_of_files: dict[str, FileHash] = {}, - use_file_history: bool = True, - ) -> ExecutionResponse: - if execution_action is not None: - # Step execution - execution_response = WorkflowHelper.step_execution( - workflow, - execution_action, - execution_id=execution_id, - hash_values_of_files=hash_values_of_files, - ) - elif pipeline_guid: - # pipeline execution - PipelineProcessor.update_pipeline( - pipeline_guid, Pipeline.PipelineStatus.INPROGRESS - ) - execution_response = WorkflowHelper.complete_execution( - workflow=workflow, - execution_id=execution_id, - pipeline_id=pipeline_guid, - execution_mode=WorkflowExecution.Mode.INSTANT, - hash_values_of_files=hash_values_of_files, - use_file_history=use_file_history, - ) - else: - execution_response = WorkflowHelper.complete_execution( - workflow=workflow, - execution_id=execution_id, - execution_mode=WorkflowExecution.Mode.INSTANT, - hash_values_of_files=hash_values_of_files, - use_file_history=use_file_history, - ) - return execution_response - - def activate(self, request: Request, pk: str) -> Response: - workflow = WorkflowHelper.active_project_workflow(pk) - serializer = WorkflowSerializer(workflow) - return Response(serializer.data, status=status.HTTP_200_OK) - - @action(detail=True, methods=["get"]) - def clear_cache(self, request: Request, *args: Any, **kwargs: Any) -> Response: - workflow = self.get_object() - response: dict[str, Any] = WorkflowHelper.clear_cache(workflow_id=workflow.id) - return Response(response.get("message"), status=response.get("status")) - - @action(detail=True, methods=["get"]) - def can_update(self, request: Request, pk: str) -> Response: - response: dict[str, Any] = WorkflowHelper.can_update_workflow(pk) - return Response(response, status=status.HTTP_200_OK) - - @action(detail=True, methods=["get"]) - def clear_file_marker( - self, request: Request, *args: Any, **kwargs: Any - ) -> Response: - workflow = self.get_object() - response: dict[str, Any] = WorkflowHelper.clear_file_marker( - workflow_id=workflow.id - ) - return Response(response.get("message"), status=response.get("status")) - - @action(detail=False, methods=["get"]) - def get_schema(self, request: Request, *args: Any, **kwargs: Any) -> Response: - """Retrieves the JSON schema for source/destination type modules for - entities file/API/DB. - - Takes query params `type` (defaults to "src") and - `entity` (defaults to "file"). 
- - Returns: - Response: JSON schema for the request made - """ - schema_type = request.query_params.get("type", SchemaType.SRC.value) - schema_entity = request.query_params.get("entity", SchemaEntity.FILE.value) - - WorkflowSchemaHelper.validate_request( - schema_type=SchemaType(schema_type), - schema_entity=SchemaEntity(schema_entity), - ) - json_schema = WorkflowSchemaHelper.get_json_schema( - schema_type=schema_type, schema_entity=schema_entity - ) - return Response(data=json_schema, status=status.HTTP_200_OK) diff --git a/backend/workflow_manager/workflow/workflow_helper.py b/backend/workflow_manager/workflow/workflow_helper.py deleted file mode 100644 index 6d30bcdd0..000000000 --- a/backend/workflow_manager/workflow/workflow_helper.py +++ /dev/null @@ -1,883 +0,0 @@ -import json -import logging -import os -import traceback -from typing import Any, Optional -from uuid import uuid4 - -from account.constants import Common -from account.models import Organization -from api.models import APIDeployment -from api.utils import APIDeploymentUtils -from celery import current_task -from celery import exceptions as celery_exceptions -from celery import shared_task -from celery.result import AsyncResult -from django.db import IntegrityError, connection -from django_tenants.utils import get_tenant_model, tenant_context -from pipeline.models import Pipeline -from pipeline.pipeline_processor import PipelineProcessor -from rest_framework import serializers -from tool_instance.constants import ToolInstanceKey -from tool_instance.models import ToolInstance -from tool_instance.tool_instance_helper import ToolInstanceHelper -from unstract.workflow_execution.enums import LogComponent, LogLevel, LogState -from unstract.workflow_execution.exceptions import StopExecution -from utils.cache_service import CacheService -from utils.local_context import StateStore -from workflow_manager.endpoint.destination import DestinationConnector -from workflow_manager.endpoint.dto import FileHash -from workflow_manager.endpoint.source import SourceConnector -from workflow_manager.workflow.constants import ( - CeleryConfigurations, - WorkflowErrors, - WorkflowExecutionKey, - WorkflowMessages, -) -from workflow_manager.workflow.dto import AsyncResultData, ExecutionResponse -from workflow_manager.workflow.enums import ExecutionStatus, SchemaEntity, SchemaType -from workflow_manager.workflow.exceptions import ( - InvalidRequest, - TaskDoesNotExistError, - WorkflowDoesNotExistError, - WorkflowExecutionNotExist, -) -from workflow_manager.workflow.execution import WorkflowExecutionServiceHelper -from workflow_manager.workflow.file_history_helper import FileHistoryHelper -from workflow_manager.workflow.models.execution import WorkflowExecution -from workflow_manager.workflow.models.workflow import Workflow -from workflow_manager.workflow.utils import WorkflowUtil - -logger = logging.getLogger(__name__) - - -class WorkflowHelper: - @staticmethod - def get_workflow_by_id(id: str) -> Workflow: - try: - workflow: Workflow = Workflow.objects.get(pk=id) - if not workflow or workflow is None: - raise WorkflowDoesNotExistError() - return workflow - except Workflow.DoesNotExist: - logger.error(f"Error getting workflow: {id}") - raise WorkflowDoesNotExistError() - - @staticmethod - def get_active_workflow_by_project_id(project_id: str) -> Workflow: - try: - workflow: Workflow = Workflow.objects.filter( - project_id=project_id, is_active=True - ).first() - if not workflow or workflow is None: - raise WorkflowDoesNotExistError() - return 
workflow - except Workflow.DoesNotExist: - raise WorkflowDoesNotExistError() - - @staticmethod - def active_project_workflow(workflow_id: str) -> Workflow: - workflow: Workflow = WorkflowHelper.get_workflow_by_id(workflow_id) - workflow.is_active = True - workflow.save() - return workflow - - @staticmethod - def build_workflow_execution_service( - organization_id: Optional[str], - workflow: Workflow, - tool_instances: list[ToolInstance], - pipeline_id: Optional[str], - single_step: bool, - scheduled: bool, - execution_mode: tuple[str, str], - workflow_execution: Optional[WorkflowExecution], - use_file_history: bool = True, # Will be False for API deployment alone - ) -> WorkflowExecutionServiceHelper: - workflow_execution_service = WorkflowExecutionServiceHelper( - organization_id=organization_id, - workflow=workflow, - tool_instances=tool_instances, - pipeline_id=pipeline_id, - single_step=single_step, - scheduled=scheduled, - mode=execution_mode, - workflow_execution=workflow_execution, - use_file_history=use_file_history, - ) - workflow_execution_service.build() - return workflow_execution_service - - @staticmethod - def process_input_files( - workflow: Workflow, - source: SourceConnector, - destination: DestinationConnector, - execution_service: WorkflowExecutionServiceHelper, - single_step: bool, - hash_values_of_files: dict[str, FileHash] = {}, - ) -> WorkflowExecution: - input_files, total_files = source.list_files_from_source(hash_values_of_files) - error_message = None - successful_files = 0 - failed_files = 0 - execution_service.publish_initial_workflow_logs(total_files) - execution_service.update_execution( - ExecutionStatus.EXECUTING, increment_attempt=True - ) - if total_files > 0: - q_file_no_list = WorkflowUtil.get_q_no_list(workflow, total_files) - - for index, (file_path, file_hash) in enumerate(input_files.items()): - file_number = index + 1 - file_hash = WorkflowUtil.add_file_destination_filehash( - file_number, - q_file_no_list, - file_hash, - ) - try: - error = WorkflowHelper.process_file( - current_file_idx=file_number, - total_files=total_files, - input_file=file_hash.file_path, - workflow=workflow, - source=source, - destination=destination, - execution_service=execution_service, - single_step=single_step, - file_hash=file_hash, - ) - if error: - failed_files += 1 - else: - successful_files += 1 - except StopExecution as e: - execution_service.update_execution( - ExecutionStatus.STOPPED, error=str(e) - ) - break - except Exception as e: - failed_files += 1 - error_message = f"Error processing file '{file_path}'. 
{e}" - logger.error(error_message, stack_info=True, exc_info=True) - execution_service.publish_log( - message=error_message, level=LogLevel.ERROR - ) - if failed_files and failed_files >= total_files: - execution_service.update_execution( - ExecutionStatus.ERROR, error=error_message - ) - else: - execution_service.update_execution(ExecutionStatus.COMPLETED) - - execution_service.publish_final_workflow_logs( - total_files=total_files, - successful_files=successful_files, - failed_files=failed_files, - ) - return execution_service.get_execution_instance() - - @staticmethod - def process_file( - current_file_idx: int, - total_files: int, - input_file: str, - workflow: Workflow, - source: SourceConnector, - destination: DestinationConnector, - execution_service: WorkflowExecutionServiceHelper, - single_step: bool, - file_hash: FileHash, - ) -> Optional[str]: - error: Optional[str] = None - file_name = source.add_file_to_volume( - input_file_path=input_file, file_hash=file_hash - ) - try: - execution_service.initiate_tool_execution( - current_file_idx, total_files, file_name, single_step - ) - if not file_hash.is_executed: - # Multiple run_ids are linked to an execution_id - # Each run_id corresponds to workflow runs for a single file - run_id = str(uuid4()) - execution_service.execute_input_file( - run_id=run_id, - file_name=file_name, - single_step=single_step, - ) - except StopExecution: - raise - except Exception as e: - error = f"Error processing file '{os.path.basename(input_file)}'. {str(e)}" - execution_service.publish_log(error, level=LogLevel.ERROR) - execution_service.publish_update_log( - LogState.RUNNING, - f"Processing output for {file_name}", - LogComponent.DESTINATION, - ) - destination.handle_output( - file_name=file_name, - file_hash=file_hash, - workflow=workflow, - input_file_path=input_file, - error=error, - use_file_history=execution_service.use_file_history, - ) - execution_service.publish_update_log( - LogState.SUCCESS, - f"{file_name}'s output is processed successfully", - LogComponent.DESTINATION, - ) - return error - - @staticmethod - def validate_tool_instances_meta( - tool_instances: list[ToolInstance], - ) -> None: - for tool in tool_instances: - ToolInstanceHelper.validate_tool_settings( - user=tool.workflow.created_by, - tool_uid=tool.tool_id, - tool_meta=tool.metadata, - ) - - @staticmethod - def run_workflow( - workflow: Workflow, - hash_values_of_files: dict[str, FileHash] = {}, - organization_id: Optional[str] = None, - pipeline_id: Optional[str] = None, - scheduled: bool = False, - single_step: bool = False, - workflow_execution: Optional[WorkflowExecution] = None, - execution_mode: Optional[tuple[str, str]] = None, - use_file_history: bool = True, - ) -> ExecutionResponse: - tool_instances: list[ToolInstance] = ( - ToolInstanceHelper.get_tool_instances_by_workflow( - workflow.id, ToolInstanceKey.STEP - ) - ) - - WorkflowHelper.validate_tool_instances_meta(tool_instances=tool_instances) - execution_mode = execution_mode or WorkflowExecution.Mode.INSTANT - execution_service = WorkflowHelper.build_workflow_execution_service( - organization_id=organization_id, - workflow=workflow, - tool_instances=tool_instances, - pipeline_id=pipeline_id, - single_step=single_step, - scheduled=scheduled, - execution_mode=execution_mode, - workflow_execution=workflow_execution, - use_file_history=use_file_history, - ) - execution_id = execution_service.execution_id - source = SourceConnector( - organization_id=organization_id, - workflow=workflow, - execution_id=execution_id, - 
execution_service=execution_service, - ) - destination = DestinationConnector( - workflow=workflow, - execution_id=execution_id, - execution_service=execution_service, - ) - # Validating endpoints - source.validate() - destination.validate() - # Execution Process - try: - workflow_execution = WorkflowHelper.process_input_files( - workflow, - source, - destination, - execution_service, - single_step=single_step, - hash_values_of_files=hash_values_of_files, - ) - WorkflowHelper._update_pipeline_status( - pipeline_id=pipeline_id, workflow_execution=workflow_execution - ) - return ExecutionResponse( - str(workflow.id), - str(workflow_execution.id), - workflow_execution.status, - log_id=str(execution_service.execution_log_id), - error=workflow_execution.error_message, - mode=workflow_execution.execution_mode, - result=destination.api_results, - ) - except Exception as e: - logger.error(f"Error executing workflow {workflow}: {e}") - logger.error(f"Error {traceback.format_exc()}") - workflow_execution = WorkflowExecutionServiceHelper.update_execution_err( - execution_id, str(e) - ) - WorkflowHelper._update_pipeline_status( - pipeline_id=pipeline_id, workflow_execution=workflow_execution - ) - raise - finally: - destination.delete_execution_directory() - - @staticmethod - def _update_pipeline_status( - pipeline_id: Optional[str], workflow_execution: WorkflowExecution - ) -> None: - try: - if pipeline_id: - # Update pipeline status - if workflow_execution.status != ExecutionStatus.ERROR.value: - PipelineProcessor.update_pipeline( - pipeline_id, - Pipeline.PipelineStatus.SUCCESS, - execution_id=workflow_execution.id, - is_end=True, - ) - else: - PipelineProcessor.update_pipeline( - pipeline_id, - Pipeline.PipelineStatus.FAILURE, - execution_id=workflow_execution.id, - error_message=workflow_execution.error_message, - is_end=True, - ) - # Expected exception since API deployments are not tracked in Pipeline - except Pipeline.DoesNotExist: - api = APIDeploymentUtils.get_api_by_id(api_id=pipeline_id) - if api: - APIDeploymentUtils.send_notification( - api=api, workflow_execution=workflow_execution - ) - except Exception as e: - logger.warning( - f"Error updating pipeline {pipeline_id} status: {e}, " - f"with workflow execution: {workflow_execution}" - ) - - @staticmethod - def get_status_of_async_task( - execution_id: str, - ) -> ExecutionResponse: - """Get celery task status. - - Args: - execution_id (str): workflow execution id - - Raises: - TaskDoesNotExistError: Not found exception - - Returns: - ExecutionResponse: _description_ - """ - execution = WorkflowExecution.objects.get(id=execution_id) - - if not execution.task_id: - raise TaskDoesNotExistError() - - result = AsyncResult(str(execution.task_id)) - - task = AsyncResultData(async_result=result) - return ExecutionResponse( - execution.workflow_id, - execution_id, - execution.status, - result=task.result, - ) - - @staticmethod - def execute_workflow_async( - workflow_id: str, - execution_id: str, - hash_values_of_files: dict[str, FileHash], - timeout: int = -1, - pipeline_id: Optional[str] = None, - queue: Optional[str] = None, - use_file_history: bool = True, - ) -> ExecutionResponse: - """Adding a workflow to the queue for execution. - - Args: - workflow_id (str): workflowId - execution_id (str): Execution ID - timeout (int): Celery timeout (timeout -1 : async execution) - pipeline_id (Optional[str], optional): Optional pipeline. Defaults to None. 
- queue (Optional[str]): Name of the celery queue to push into - use_file_history (bool): Use FileHistory table to return results on already - processed files. Defaults to True - - Returns: - ExecutionResponse: Existing status of execution - """ - try: - file_hash_in_str = { - key: value.to_json() for key, value in hash_values_of_files.items() - } - org_schema = connection.tenant.schema_name - log_events_id = StateStore.get(Common.LOG_EVENTS_ID) - async_execution = WorkflowHelper.execute_bin.apply_async( - args=[ - org_schema, # schema_name - workflow_id, # workflow_id - execution_id, # execution_id - file_hash_in_str, # hash_values_of_files - ], - kwargs={ - "scheduled": False, - "execution_mode": None, - "pipeline_id": pipeline_id, - "log_events_id": log_events_id, - "use_file_history": use_file_history, - }, - queue=queue, - ) - logger.info( - f"Job '{async_execution}' has been enqueued for " - f"execution_id '{execution_id}'" - ) - if timeout > -1: - async_execution.wait( - timeout=timeout, - interval=CeleryConfigurations.INTERVAL, - ) - task = AsyncResultData(async_result=async_execution) - celery_result = task.to_dict() - task_result = celery_result.get("result") - workflow_execution = WorkflowExecution.objects.get(id=execution_id) - execution_response = ExecutionResponse( - workflow_id, - execution_id, - workflow_execution.status, - result=task_result, - ) - return execution_response - except celery_exceptions.TimeoutError: - return ExecutionResponse( - workflow_id, - execution_id, - async_execution.status, - message=WorkflowMessages.CELERY_TIMEOUT_MESSAGE, - ) - except Exception as error: - WorkflowExecutionServiceHelper.update_execution_err( - execution_id, str(error) - ) - logger.error(f"Errors while job enqueueing {str(error)}") - logger.error(f"Error {traceback.format_exc()}") - return ExecutionResponse( - workflow_id, - execution_id, - ExecutionStatus.ERROR.value, - error=str(error), - ) - - @staticmethod - @shared_task( - name="async_execute_bin", - acks_late=True, - autoretry_for=(Exception,), - max_retries=1, - retry_backoff=True, - retry_backoff_max=500, - retry_jitter=True, - ) - def execute_bin( - schema_name: str, - workflow_id: str, - execution_id: str, - hash_values_of_files: dict[str, dict[str, Any]], - scheduled: bool = False, - execution_mode: Optional[tuple[str, str]] = None, - pipeline_id: Optional[str] = None, - use_file_history: bool = True, - **kwargs: dict[str, Any], - ) -> Optional[list[Any]]: - """Asynchronous Execution By celery. - - Args: - schema_name (str): schema name to get Data - workflow_id (str): Workflow Id - execution_id (str): Id of the execution - scheduled (bool, optional): Represents if it is a scheduled execution - Defaults to False - execution_mode (Optional[WorkflowExecution.Mode]): WorkflowExecution Mode - Defaults to None - pipeline_id (Optional[str], optional): Id of pipeline. Defaults to None - use_file_history (bool): Use FileHistory table to return results on already - processed files. 
Defaults to True - - Kwargs: - log_events_id (str): Session ID of the user, - helps establish WS connection for streaming logs to the FE - - Returns: - dict[str, list[Any]]: Returns a dict with result from workflow execution - """ - hash_values = { - key: FileHash.from_json(value) - for key, value in hash_values_of_files.items() - } - task_id = current_task.request.id - tenant: Organization = ( - get_tenant_model().objects.filter(schema_name=schema_name).first() - ) - with tenant_context(tenant): - workflow = Workflow.objects.get(id=workflow_id) - try: - workflow_execution = ( - WorkflowExecutionServiceHelper.create_workflow_execution( - workflow_id=workflow_id, - single_step=False, - pipeline_id=pipeline_id, - mode=WorkflowExecution.Mode.QUEUE, - execution_id=execution_id, - **kwargs, # type: ignore - ) - ) - except IntegrityError: - # Use existing instance on retry attempt - workflow_execution = WorkflowExecution.objects.get(pk=execution_id) - WorkflowExecutionServiceHelper.update_execution_task( - execution_id=execution_id, task_id=task_id - ) - try: - execution_response = WorkflowHelper.run_workflow( - workflow=workflow, - organization_id=schema_name, - pipeline_id=pipeline_id, - scheduled=scheduled, - workflow_execution=workflow_execution, - execution_mode=execution_mode, - hash_values_of_files=hash_values, - use_file_history=use_file_history, - ) - except Exception as error: - error_message = traceback.format_exc() - logger.error( - f"Error executing execution {workflow_execution}: {error_message}" - ) - WorkflowExecutionServiceHelper.update_execution_err( - execution_id, str(error) - ) - raise - return execution_response.result - - @staticmethod - def complete_execution( - workflow: Workflow, - execution_id: Optional[str] = None, - pipeline_id: Optional[str] = None, - execution_mode: Optional[WorkflowExecution] = WorkflowExecution.Mode.QUEUE, - hash_values_of_files: dict[str, FileHash] = {}, - use_file_history: bool = True, - ) -> ExecutionResponse: - if pipeline_id: - logger.info(f"Executing pipeline: {pipeline_id}") - # Create a new WorkflowExecution entity for each pipeline execution. - # This ensures every pipeline run is tracked as a distinct execution. 
- workflow_execution = ( - WorkflowExecutionServiceHelper.create_workflow_execution( - workflow_id=workflow.id, - single_step=False, - pipeline_id=pipeline_id, - mode=execution_mode, - ) - ) - execution_id = workflow_execution.id - log_events_id = StateStore.get(Common.LOG_EVENTS_ID) - org_schema = connection.tenant.schema_name - if execution_mode == WorkflowExecution.Mode.INSTANT: - # Instant request from UX (Sync now in ETL and Workflow page) - response: ExecutionResponse = WorkflowHelper.execute_workflow_async( - workflow_id=workflow.id, - pipeline_id=pipeline_id, - execution_id=execution_id, - hash_values_of_files=hash_values_of_files, - use_file_history=use_file_history, - ) - return response - else: - execution_result = WorkflowHelper.execute_bin( - schema_name=org_schema, - workflow_id=workflow.id, - execution_id=workflow_execution.id, - hash_values_of_files=hash_values_of_files, - scheduled=True, - execution_mode=execution_mode, - pipeline_id=pipeline_id, - log_events_id=log_events_id, - use_file_history=use_file_history, - ) - - updated_execution = WorkflowExecution.objects.get(id=execution_id) - execution_response = ExecutionResponse( - workflow.id, - execution_id, - updated_execution.status, - result=execution_result, - ) - return execution_response - - if execution_id is None: - # Creating execution entity and return - return WorkflowHelper.create_and_make_execution_response( - workflow_id=workflow.id, pipeline_id=pipeline_id - ) - try: - # Normal execution - workflow_execution = WorkflowExecution.objects.get(pk=execution_id) - if ( - workflow_execution.status != ExecutionStatus.PENDING.value - or workflow_execution.execution_type != WorkflowExecution.Type.COMPLETE - ): - raise InvalidRequest(WorkflowErrors.INVALID_EXECUTION_ID) - return WorkflowHelper.run_workflow( - workflow=workflow, - workflow_execution=workflow_execution, - hash_values_of_files=hash_values_of_files, - use_file_history=use_file_history, - ) - except WorkflowExecution.DoesNotExist: - return WorkflowHelper.create_and_make_execution_response( - workflow_id=workflow.id, pipeline_id=pipeline_id - ) - - @staticmethod - def get_current_execution(execution_id: str) -> ExecutionResponse: - try: - workflow_execution = WorkflowExecution.objects.get(pk=execution_id) - return ExecutionResponse( - workflow_execution.workflow_id, - workflow_execution.id, - workflow_execution.status, - log_id=workflow_execution.execution_log_id, - error=workflow_execution.error_message, - mode=workflow_execution.execution_mode, - ) - except WorkflowExecution.DoesNotExist: - raise WorkflowExecutionNotExist() - - @staticmethod - def step_execution( - workflow: Workflow, - execution_action: str, - execution_id: Optional[str] = None, - hash_values_of_files: dict[str, FileHash] = {}, - ) -> ExecutionResponse: - if execution_action is Workflow.ExecutionAction.START.value: # type: ignore - if execution_id is None: - return WorkflowHelper.create_and_make_execution_response( - workflow_id=workflow.id, single_step=True - ) - try: - workflow_execution = WorkflowExecution.objects.get(pk=execution_id) - return WorkflowHelper.run_workflow( - workflow=workflow, - single_step=True, - workflow_execution=workflow_execution, - hash_values_of_files=hash_values_of_files, - ) - except WorkflowExecution.DoesNotExist: - return WorkflowHelper.create_and_make_execution_response( - workflow_id=workflow.id, single_step=True - ) - - else: - if execution_id is None: - raise InvalidRequest("execution_id is missed") - try: - workflow_execution = 
WorkflowExecution.objects.get(pk=execution_id) - except WorkflowExecution.DoesNotExist: - raise WorkflowExecutionNotExist(WorkflowErrors.INVALID_EXECUTION_ID) - if ( - workflow_execution.status != ExecutionStatus.PENDING.value - or workflow_execution.execution_type != WorkflowExecution.Type.STEP - ): - raise InvalidRequest(WorkflowErrors.INVALID_EXECUTION_ID) - current_action: Optional[str] = CacheService.get_key(execution_id) - logger.info(f"workflow_execution.current_action {current_action}") - if current_action is None: - raise InvalidRequest(WorkflowErrors.INVALID_EXECUTION_ID) - CacheService.set_key(execution_id, execution_action) - workflow_execution = WorkflowExecution.objects.get(pk=execution_id) - - return ExecutionResponse( - workflow.id, - execution_id, - workflow_execution.status, - log_id=workflow_execution.execution_log_id, - error=workflow_execution.error_message, - mode=workflow_execution.execution_mode, - ) - - @staticmethod - def create_and_make_execution_response( - workflow_id: str, - pipeline_id: Optional[str] = None, - single_step: bool = False, - mode: tuple[str, str] = WorkflowExecution.Mode.INSTANT, - ) -> ExecutionResponse: - log_events_id = StateStore.get(Common.LOG_EVENTS_ID) - workflow_execution = WorkflowExecutionServiceHelper.create_workflow_execution( - workflow_id=workflow_id, - single_step=single_step, - pipeline_id=pipeline_id, - mode=mode, - log_events_id=log_events_id, - ) - return ExecutionResponse( - workflow_execution.workflow_id, - workflow_execution.id, - workflow_execution.status, - log_id=workflow_execution.execution_log_id, - error=workflow_execution.error_message, - mode=workflow_execution.execution_mode, - ) - - # TODO: Access cache through a manager - @staticmethod - def clear_cache(workflow_id: str) -> dict[str, Any]: - """Function to clear cache with a specific pattern.""" - response: dict[str, Any] = {} - try: - key_pattern = f"*:cache:{workflow_id}:*" - CacheService.clear_cache(key_pattern) - response["message"] = WorkflowMessages.CACHE_CLEAR_SUCCESS - response["status"] = 200 - return response - except Exception as exc: - logger.error(f"Error occurred while clearing cache : {exc}") - response["message"] = WorkflowMessages.CACHE_CLEAR_FAILED - response["status"] = 400 - return response - - @staticmethod - def clear_file_marker(workflow_id: str) -> dict[str, Any]: - """Function to clear file marker from the cache.""" - # Clear file history from the table - response: dict[str, Any] = {} - workflow = Workflow.objects.get(id=workflow_id) - try: - FileHistoryHelper.clear_history_for_workflow(workflow=workflow) - response["message"] = WorkflowMessages.FILE_MARKER_CLEAR_SUCCESS - response["status"] = 200 - return response - except Exception as exc: - logger.error(f"Error occurred while clearing file marker : {exc}") - response["message"] = WorkflowMessages.FILE_MARKER_CLEAR_FAILED - response["status"] = 400 - return response - - @staticmethod - def get_workflow_execution_id(execution_id: str) -> str: - wf_exec_prefix = WorkflowExecutionKey.WORKFLOW_EXECUTION_ID_PREFIX - workflow_execution_id = f"{wf_exec_prefix}-{execution_id}" - return workflow_execution_id - - @staticmethod - def get_execution_by_id(execution_id: str) -> WorkflowExecution: - try: - execution: WorkflowExecution = WorkflowExecution.objects.get( - id=execution_id - ) - return execution - except WorkflowExecution.DoesNotExist: - raise WorkflowDoesNotExistError() - - @staticmethod - def make_async_result(obj: AsyncResult) -> dict[str, Any]: - return { - "id": obj.id, - "status": 
obj.status, - "result": obj.result, - "is_ready": obj.ready(), - "is_failed": obj.failed(), - "info": obj.info, - } - - @staticmethod - def can_update_workflow(workflow_id: str) -> dict[str, Any]: - try: - workflow: Workflow = Workflow.objects.get(pk=workflow_id) - if not workflow or workflow is None: - raise WorkflowDoesNotExistError() - used_count = Pipeline.objects.filter(workflow=workflow).count() - if used_count == 0: - used_count = APIDeployment.objects.filter(workflow=workflow).count() - return {"can_update": used_count == 0} - except Workflow.DoesNotExist: - logger.error(f"Error getting workflow: {id}") - raise WorkflowDoesNotExistError() - - -class WorkflowSchemaHelper: - """Helper class for workflow schema related methods.""" - - @staticmethod - def validate_request(schema_type: SchemaType, schema_entity: SchemaEntity) -> bool: - """Validates the given args for reading the JSON schema. - - Schema type of `src`, allows entities `file` and `api` - Schema type of `dest`, allows entities `db` - - Args: - schema_type (SchemaType): Enum with values `src`, `dest` - schema_entity (SchemaEntity): Enum with values `file`, `api`, `db` - - Raises: - serializers.ValidationError: If invalid values/ - combination is requested - - Returns: - bool: _description_ - """ - possible_types = [e.value for e in SchemaType] - possible_entities = [e.value for e in SchemaEntity] - - if schema_type.value not in possible_types: - raise serializers.ValidationError( - f"Invalid value for 'type': {schema_type.value}, " - f"should be one of {possible_types}" - ) - - if schema_entity.value not in possible_entities: - raise serializers.ValidationError( - f"Invalid value for 'entity': {schema_entity.value}, " - f"should be one of {possible_entities}" - ) - - if (schema_type == SchemaType.SRC and schema_entity == SchemaEntity.DB) or ( - schema_type == SchemaType.DEST and schema_entity != SchemaEntity.DB - ): - raise serializers.ValidationError( - f"Invalid values for 'type': {schema_type.value}, " - f"'entity': {schema_entity.value}." - f"Param 'type': {SchemaType.SRC.value} allows " - f"{SchemaEntity.FILE.value} and {SchemaEntity.API.value}" - f"'type': {SchemaType.DEST.value} allows " - f"{SchemaEntity.DB.value}." - ) - return True - - @staticmethod - def get_json_schema( - schema_type: SchemaType, schema_entity: SchemaEntity - ) -> dict[str, Any]: - """Reads and returns the JSON schema for the given args. - - Args: - schema_type (SchemaType): Enum with values `src`, `dest` - schema_entity (SchemaEntity): Enum with values `file`, `api`, `db` - - Returns: - dict[str, Any]: JSON schema for the requested entity - """ - schema_path = ( - f"{os.path.dirname(__file__)}/static/" f"{schema_type}/{schema_entity}.json" - ) - with open(schema_path, encoding="utf-8") as file: - schema = json.load(file) - return schema # type: ignore diff --git a/backend/workflow_manager/workflow_v2/dto.py b/backend/workflow_manager/workflow_v2/dto.py index 1f98f78d6..52571325a 100644 --- a/backend/workflow_manager/workflow_v2/dto.py +++ b/backend/workflow_manager/workflow_v2/dto.py @@ -63,6 +63,21 @@ def remove_result_metadata_keys(self, keys_to_remove: list[str] = []) -> None: self._remove_specific_keys(result=result, keys_to_remove=keys_to_remove) + def remove_result_metrics(self) -> None: + """Removes the 'metrics' key from the 'result' dictionary within each + 'result' dictionary in the 'result' list attribute of the instance. 
+        """
+        if not isinstance(self.result, list):
+            return
+
+        for item in self.result:
+            if not isinstance(item, dict):
+                continue
+
+            result = item.get("result")
+            if isinstance(result, dict):
+                result.pop("metrics", None)
+
     def _remove_specific_keys(self, result: dict, keys_to_remove: list[str]) -> None:
         """Removes specified keys from the 'metadata' dictionary within the
         provided 'result' dictionary. If 'keys_to_remove' is empty, the
diff --git a/backend/workflow_manager/workflow_v2/exceptions.py b/backend/workflow_manager/workflow_v2/exceptions.py
index 39087ab8e..fa53b3023 100644
--- a/backend/workflow_manager/workflow_v2/exceptions.py
+++ b/backend/workflow_manager/workflow_v2/exceptions.py
@@ -21,6 +21,11 @@ class WorkflowDoesNotExistError(APIException):
     default_detail = "Workflow does not exist"
 
 
+class ExecutionDoesNotExistError(APIException):
+    status_code = 404
+    default_detail = "Execution does not exist."
+
+
 class TaskDoesNotExistError(APIException):
     status_code = 404
     default_detail = "Task does not exist"
diff --git a/backend/workflow_manager/workflow_v2/workflow_helper.py b/backend/workflow_manager/workflow_v2/workflow_helper.py
index afb7962db..c2ecda704 100644
--- a/backend/workflow_manager/workflow_v2/workflow_helper.py
+++ b/backend/workflow_manager/workflow_v2/workflow_helper.py
@@ -414,17 +414,18 @@ def get_status_of_async_task(
 
         Raises:
             TaskDoesNotExistError: Not found exception
+            ExecutionDoesNotExistError: If execution is not found
 
         Returns:
             ExecutionResponse: _description_
         """
         execution = WorkflowExecution.objects.get(id=execution_id)
-
         if not execution.task_id:
-            raise TaskDoesNotExistError()
+            raise TaskDoesNotExistError(
+                f"No task ID found for execution: {execution_id}"
+            )
 
         result = AsyncResult(str(execution.task_id))
-
         task = AsyncResultData(async_result=result)
 
         # Prepare the initial response with the task's current status and result.
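Note on the metrics-stripping change above: remove_result_metrics() only walks a list-shaped result and drops the nested "metrics" key from each item's inner "result" dict; anything else is left untouched. Below is a minimal, self-contained sketch of that behaviour (illustrative only; DemoExecutionResponse and the sample payload shape are assumptions for the demo, not the project's actual classes):

from dataclasses import dataclass, field
from typing import Any


@dataclass
class DemoExecutionResponse:
    """Stand-in for the execution-response DTO; only what the demo needs."""

    result: Any = field(default_factory=list)

    def remove_result_metrics(self) -> None:
        # Mirrors the method added in dto.py: ignore non-list results,
        # skip non-dict items, and pop only the nested "metrics" key.
        if not isinstance(self.result, list):
            return
        for item in self.result:
            if not isinstance(item, dict):
                continue
            inner = item.get("result")
            if isinstance(inner, dict):
                inner.pop("metrics", None)


response = DemoExecutionResponse(
    result=[
        {
            "file": "invoice.pdf",
            "result": {
                "fields": {"total": "42.00"},
                "metrics": {"time_taken(s)": 1.2},
            },
        }
    ]
)
response.remove_result_metrics()
assert "metrics" not in response.result[0]["result"]  # metrics stripped in place

Callers that do want metrics simply skip this call, so the nested "metrics" block passes through unchanged.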
diff --git a/frontend/package-lock.json b/frontend/package-lock.json index fcf2d882b..08f5bab55 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -34,7 +34,6 @@ "js-cookie": "^3.0.5", "js-yaml": "^4.1.0", "json-2-csv": "^5.5.1", - "markdown-to-jsx": "^7.2.1", "moment": "^2.29.4", "moment-timezone": "^0.5.45", "pdfjs-dist": "^3.4.120", @@ -52,6 +51,7 @@ "react-scripts": "5.0.1", "react-social-login-buttons": "^3.9.1", "remark-gfm": "^3.0.1", + "remarkable": "^2.0.1", "socket.io-client": "^4.7.2", "uuid": "^9.0.1", "zustand": "^4.3.8" @@ -5862,6 +5862,14 @@ "node": ">= 4.0.0" } }, + "node_modules/autolinker": { + "version": "3.16.2", + "resolved": "https://registry.npmjs.org/autolinker/-/autolinker-3.16.2.tgz", + "integrity": "sha512-JiYl7j2Z19F9NdTmirENSUUIIL/9MytEWtmzhfmsKPCp9E+G35Y0UNCMoM9tFigxT59qSc8Ml2dlZXOCVTYwuA==", + "dependencies": { + "tslib": "^2.3.0" + } + }, "node_modules/autoprefixer": { "version": "10.4.14", "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.14.tgz", @@ -17537,6 +17545,29 @@ "url": "https://opencollective.com/unified" } }, + "node_modules/remarkable": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/remarkable/-/remarkable-2.0.1.tgz", + "integrity": "sha512-YJyMcOH5lrR+kZdmB0aJJ4+93bEojRZ1HGDn9Eagu6ibg7aVZhc3OWbbShRid+Q5eAfsEqWxpe+g5W5nYNfNiA==", + "dependencies": { + "argparse": "^1.0.10", + "autolinker": "^3.11.0" + }, + "bin": { + "remarkable": "bin/remarkable.js" + }, + "engines": { + "node": ">= 6.0.0" + } + }, + "node_modules/remarkable/node_modules/argparse": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", + "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", + "dependencies": { + "sprintf-js": "~1.0.2" + } + }, "node_modules/renderkid": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/renderkid/-/renderkid-3.0.0.tgz", @@ -24764,6 +24795,14 @@ "resolved": "https://registry.npmjs.org/at-least-node/-/at-least-node-1.0.0.tgz", "integrity": "sha512-+q/t7Ekv1EDY2l6Gda6LLiX14rU9TV20Wa3ofeQmwPFZbOMo9DXrLbOjFaaclkXKWidIaopwAObQDqwWtGUjqg==" }, + "autolinker": { + "version": "3.16.2", + "resolved": "https://registry.npmjs.org/autolinker/-/autolinker-3.16.2.tgz", + "integrity": "sha512-JiYl7j2Z19F9NdTmirENSUUIIL/9MytEWtmzhfmsKPCp9E+G35Y0UNCMoM9tFigxT59qSc8Ml2dlZXOCVTYwuA==", + "requires": { + "tslib": "^2.3.0" + } + }, "autoprefixer": { "version": "10.4.14", "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.14.tgz", @@ -33085,6 +33124,25 @@ "unified": "^10.0.0" } }, + "remarkable": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/remarkable/-/remarkable-2.0.1.tgz", + "integrity": "sha512-YJyMcOH5lrR+kZdmB0aJJ4+93bEojRZ1HGDn9Eagu6ibg7aVZhc3OWbbShRid+Q5eAfsEqWxpe+g5W5nYNfNiA==", + "requires": { + "argparse": "^1.0.10", + "autolinker": "^3.11.0" + }, + "dependencies": { + "argparse": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", + "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", + "requires": { + "sprintf-js": "~1.0.2" + } + } + } + }, "renderkid": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/renderkid/-/renderkid-3.0.0.tgz", diff --git a/frontend/package.json b/frontend/package.json index e1709eddf..cd16580bd 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -29,7 +29,6 @@ "js-cookie": 
"^3.0.5", "js-yaml": "^4.1.0", "json-2-csv": "^5.5.1", - "markdown-to-jsx": "^7.2.1", "moment": "^2.29.4", "moment-timezone": "^0.5.45", "pdfjs-dist": "^3.4.120", @@ -47,6 +46,7 @@ "react-scripts": "5.0.1", "react-social-login-buttons": "^3.9.1", "remark-gfm": "^3.0.1", + "remarkable": "^2.0.1", "socket.io-client": "^4.7.2", "uuid": "^9.0.1", "zustand": "^4.3.8" diff --git a/frontend/src/components/agency/markdown-renderer/MarkdownRenderer.jsx b/frontend/src/components/agency/markdown-renderer/MarkdownRenderer.jsx index 22757f32e..411d2564e 100644 --- a/frontend/src/components/agency/markdown-renderer/MarkdownRenderer.jsx +++ b/frontend/src/components/agency/markdown-renderer/MarkdownRenderer.jsx @@ -1,32 +1,22 @@ import PropTypes from "prop-types"; -import ReactMarkdown from "markdown-to-jsx"; +import { memo, useMemo } from "react"; +import { Remarkable } from "remarkable"; -function MarkdownRenderer({ markdownText }) { - return ( - - {markdownText} - - ); -} +const md = new Remarkable(); -MarkdownRenderer.propTypes = { - markdownText: PropTypes.string, -}; +const MarkdownRenderer = memo(({ markdownText }) => { + const htmlContent = useMemo(() => { + if (!markdownText) return ""; + return md.render(markdownText); + }, [markdownText]); -const MyParagraph = ({ children, ...props }) => ( -
-    {children}
-);
+  return
; +}); -MyParagraph.propTypes = { - children: PropTypes.any, +MarkdownRenderer.displayName = "MarkdownRenderer"; + +MarkdownRenderer.propTypes = { + markdownText: PropTypes.string, }; export { MarkdownRenderer }; diff --git a/frontend/src/components/custom-tools/document-parser/DocumentParser.jsx b/frontend/src/components/custom-tools/document-parser/DocumentParser.jsx index b075c7e4e..43f64d111 100644 --- a/frontend/src/components/custom-tools/document-parser/DocumentParser.jsx +++ b/frontend/src/components/custom-tools/document-parser/DocumentParser.jsx @@ -180,29 +180,6 @@ function DocumentParser({ return outputs; }; - const getPromptCoverageCount = (promptId) => { - const keys = Object.keys(promptOutputs || {}); - const coverageKey = `coverage_${promptId}`; - const outputs = {}; - if (!keys?.length) { - details?.prompts?.forEach((prompt) => { - if (prompt?.coverage) { - const key = Object.keys(prompt?.coverage)[0]; - if (key?.startsWith(coverageKey)) { - outputs[key] = prompt?.coverage[key]; - } - } - }); - return outputs; - } - keys?.forEach((key) => { - if (key?.startsWith(coverageKey)) { - outputs[key] = promptOutputs[key]; - } - }); - return outputs; - }; - if (!details?.prompts?.length) { if (isSimplePromptStudio && SpsPromptsEmptyState) { return ; @@ -230,7 +207,7 @@ function DocumentParser({ outputs={getPromptOutputs(item?.prompt_id)} enforceTypeList={enforceTypeList} setUpdatedPromptsCopy={setUpdatedPromptsCopy} - coverageCountData={getPromptCoverageCount(item?.prompt_id)} + coverageCountData={item?.coverage} isChallenge={isChallenge} />
diff --git a/frontend/src/components/custom-tools/manage-docs-modal/ManageDocsModal.jsx b/frontend/src/components/custom-tools/manage-docs-modal/ManageDocsModal.jsx index 3d9d53e7d..d563e8a1e 100644 --- a/frontend/src/components/custom-tools/manage-docs-modal/ManageDocsModal.jsx +++ b/frontend/src/components/custom-tools/manage-docs-modal/ManageDocsModal.jsx @@ -33,6 +33,7 @@ import SpaceWrapper from "../../widgets/space-wrapper/SpaceWrapper"; import { SpinnerLoader } from "../../widgets/spinner-loader/SpinnerLoader"; import "./ManageDocsModal.css"; import usePostHogEvents from "../../../hooks/usePostHogEvents"; +import { usePromptOutputStore } from "../../../store/prompt-output-store"; let SummarizeStatusTitle = null; try { @@ -90,6 +91,7 @@ function ManageDocsModal({ const axiosPrivate = useAxiosPrivate(); const handleException = useExceptionHandler(); const { setPostHogCustomEvent } = usePostHogEvents(); + const { promptOutputs, updatePromptOutput } = usePromptOutputStore(); const successIndex = ( @@ -543,21 +545,48 @@ function ManageDocsModal({ ); updateCustomTool({ listOfDocs: newListOfDocs }); - if (newListOfDocs?.length === 1 && selectedDoc?.document_id !== docId) { - const doc = newListOfDocs[1]; + if (selectedDoc?.document_id === docId) { + const doc = newListOfDocs[0]; handleDocChange(doc); } - - if (docId === selectedDoc?.document_id) { - updateCustomTool({ selectedDoc: "" }); - handleUpdateTool({ output: "" }); - } + const updatedPromptDetails = removeIdFromCoverage(details, docId); + const updatedPromptOutput = removeIdFromCoverageOfPromptOutput( + promptOutputs, + docId + ); + updateCustomTool({ details: updatedPromptDetails }); + updatePromptOutput(updatedPromptOutput); }) .catch((err) => { setAlertDetails(handleException(err, "Failed to delete")); }); }; + const removeIdFromCoverage = (data, idToRemove) => { + if (data.prompts && Array.isArray(data.prompts)) { + data.prompts.forEach((prompt) => { + if (Array.isArray(prompt.coverage)) { + prompt.coverage = prompt.coverage.filter((id) => id !== idToRemove); + } + }); + } + return data; // Return the updated data + }; + + const removeIdFromCoverageOfPromptOutput = (data, idToRemove) => { + return Object.entries(data).reduce((updatedData, [key, value]) => { + // Create a new object for the current entry + updatedData[key] = { + ...value, + // Update the coverage array if it exists + coverage: value?.coverage + ? value?.coverage?.filter((id) => id !== idToRemove) + : value?.coverage, + }; + return updatedData; + }, {}); + }; + return ( 1; const divRef = useRef(null); const [enforceType, setEnforceType] = useState(""); - const profileId = singlePassExtractMode - ? 
defaultLlmProfile - : selectedLlmProfileId || defaultLlmProfile; - const coverageKey = generateCoverageKey(promptDetails?.prompt_id, profileId); + const promptId = promptDetails?.prompt_id; + const docId = selectedDoc?.document_id; + const promptProfile = promptDetails?.profile_manager || defaultLlmProfile; + const promptOutputKey = generatePromptOutputKey( + promptId, + docId, + promptProfile, + singlePassExtractMode, + true + ); + const promptCoverage = + promptOutputs[promptOutputKey]?.coverage || coverageCountData; useEffect(() => { if (enforceType !== promptDetails?.enforce_type) { @@ -214,7 +224,7 @@ function PromptCardItems({ )} - Coverage: {coverageCountData[coverageKey] || 0} of{" "} + Coverage: {promptCoverage?.length || 0} of{" "} {listOfDocs?.length || 0} docs diff --git a/frontend/src/hooks/usePromptOutput.js b/frontend/src/hooks/usePromptOutput.js index ad32591e3..5517ec968 100644 --- a/frontend/src/hooks/usePromptOutput.js +++ b/frontend/src/hooks/usePromptOutput.js @@ -91,7 +91,6 @@ const usePromptOutput = () => { let isTokenUsageForSinglePassAdded = false; const tokenUsageDetails = {}; - data.forEach((item) => { const promptId = item?.prompt_id; const docId = item?.document_manager; @@ -109,7 +108,6 @@ const usePromptOutput = () => { isSinglePass, true ); - const coverageKey = `coverage_${item?.prompt_id}_${llmProfile}`; outputs[key] = { runId: item?.run_id, promptOutputId: item?.prompt_output_id, @@ -119,8 +117,8 @@ const usePromptOutput = () => { tokenUsage: item?.token_usage, output: item?.output, timer, + coverage: item?.coverage, }; - outputs[coverageKey] = item?.coverage[coverageKey] || 0; if (item?.is_single_pass_extract && isTokenUsageForSinglePassAdded) return; @@ -150,7 +148,6 @@ const usePromptOutput = () => { ); tokenUsageDetails[tokenUsageId] = item?.token_usage; }); - if (isReset) { setPromptOutput(outputs); setTokenUsage(tokenUsageDetails); diff --git a/pdm.lock b/pdm.lock index 6647ead32..004ce9143 100644 --- a/pdm.lock +++ b/pdm.lock @@ -5,7 +5,7 @@ groups = ["default", "hook-check-django-migrations", "lint"] strategy = ["cross_platform", "inherit_metadata"] lock_version = "4.4.2" -content_hash = "sha256:b231dac83855401e53add4b3a5f8366937cabae6e84b516b5aadb822756117ec" +content_hash = "sha256:a261c6afdf9eba45f90b35b705f784d6f234d9722ea94a02071cc23d2bc5e999" [[package]] name = "adlfs" @@ -151,16 +151,16 @@ files = [ [[package]] name = "aiosignal" -version = "1.3.1" -requires_python = ">=3.7" +version = "1.3.2" +requires_python = ">=3.9" summary = "aiosignal: a list of registered asynchronous callbacks" groups = ["hook-check-django-migrations"] dependencies = [ "frozenlist>=1.1.0", ] files = [ - {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, - {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, + {file = "aiosignal-1.3.2-py2.py3-none-any.whl", hash = "sha256:45cde58e409a301715980c2b01d0c28bdde3770d8290b5eb2173759d9acb31a5"}, + {file = "aiosignal-1.3.2.tar.gz", hash = "sha256:a8c255c66fafb1e499c9351d0bf32ff2d8a0321595ebac3b93713656d2436f54"}, ] [[package]] @@ -311,13 +311,13 @@ files = [ [[package]] name = "attrs" -version = "24.2.0" -requires_python = ">=3.7" +version = "24.3.0" +requires_python = ">=3.8" summary = "Classes Without Boilerplate" groups = ["hook-check-django-migrations"] files = [ - {file = "attrs-24.2.0-py3-none-any.whl", hash = 
"sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, - {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, + {file = "attrs-24.3.0-py3-none-any.whl", hash = "sha256:ac96cd038792094f438ad1f6ff80837353805ac950cd2aa0e0625ef19850c308"}, + {file = "attrs-24.3.0.tar.gz", hash = "sha256:8f5c07333d543103541ba7be0e2ce16eeee8130cb0b3f9238ab904ce1e85baff"}, ] [[package]] @@ -622,13 +622,13 @@ files = [ [[package]] name = "certifi" -version = "2024.8.30" +version = "2024.12.14" requires_python = ">=3.6" summary = "Python package for providing Mozilla's CA Bundle." groups = ["hook-check-django-migrations"] files = [ - {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, - {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, + {file = "certifi-2024.12.14-py3-none-any.whl", hash = "sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56"}, + {file = "certifi-2024.12.14.tar.gz", hash = "sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db"}, ] [[package]] @@ -1447,7 +1447,7 @@ files = [ [[package]] name = "google-cloud-aiplatform" -version = "1.74.0" +version = "1.75.0" requires_python = ">=3.8" summary = "Vertex AI API client library" groups = ["hook-check-django-migrations"] @@ -1465,8 +1465,8 @@ dependencies = [ "shapely<3.0.0dev", ] files = [ - {file = "google_cloud_aiplatform-1.74.0-py2.py3-none-any.whl", hash = "sha256:7f37a835e543a4cb4b62505928b983e307c5fee6d949f831cd3804f03c753d87"}, - {file = "google_cloud_aiplatform-1.74.0.tar.gz", hash = "sha256:2202e4e0cbbd2db02835737a1ae9a51ad7bf75c8ed130a3fdbcfced33525e3f0"}, + {file = "google_cloud_aiplatform-1.75.0-py2.py3-none-any.whl", hash = "sha256:eb5d79b5f7210d79a22b53c93a69b5bae5680dfc829387ea020765b97786b3d0"}, + {file = "google_cloud_aiplatform-1.75.0.tar.gz", hash = "sha256:eb8404abf1134b3b368535fe429c4eec2fd12d444c2e9ffbc329ddcbc72b36c9"}, ] [[package]] @@ -1509,7 +1509,7 @@ files = [ [[package]] name = "google-cloud-resource-manager" -version = "1.13.1" +version = "1.14.0" requires_python = ">=3.7" summary = "Google Cloud Resource Manager API client library" groups = ["hook-check-django-migrations"] @@ -1521,8 +1521,8 @@ dependencies = [ "protobuf!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<6.0.0dev,>=3.20.2", ] files = [ - {file = "google_cloud_resource_manager-1.13.1-py2.py3-none-any.whl", hash = "sha256:abdc7d443ab6c0763b8ed49ab59203e223f14c683df69e3748d5eb2237475f5f"}, - {file = "google_cloud_resource_manager-1.13.1.tar.gz", hash = "sha256:bee9f2fb1d856731182b7cc05980d216aae848947ccdadf2848a2c64ccd6bbea"}, + {file = "google_cloud_resource_manager-1.14.0-py2.py3-none-any.whl", hash = "sha256:4860c3ea9ace760b317ea90d4e27f1b32e54ededdcc340a7cb70c8ef238d8f7c"}, + {file = "google_cloud_resource_manager-1.14.0.tar.gz", hash = "sha256:daa70a3a4704759d31f812ed221e3b6f7b660af30c7862e4a0060ea91291db30"}, ] [[package]] @@ -1921,7 +1921,7 @@ files = [ [[package]] name = "huggingface-hub" -version = "0.26.5" +version = "0.27.0" requires_python = ">=3.8.0" summary = "Client library to download and publish models, datasets and other repos on the huggingface.co hub" groups = ["hook-check-django-migrations"] @@ -1935,8 +1935,8 @@ dependencies = [ "typing-extensions>=3.7.4.3", ] files = [ - {file = "huggingface_hub-0.26.5-py3-none-any.whl", hash = 
"sha256:fb7386090bbe892072e64b85f7c4479fd2d65eea5f2543327c970d5169e83924"}, - {file = "huggingface_hub-0.26.5.tar.gz", hash = "sha256:1008bd18f60bfb65e8dbc0a97249beeeaa8c99d3c2fa649354df9fa5a13ed83b"}, + {file = "huggingface_hub-0.27.0-py3-none-any.whl", hash = "sha256:8f2e834517f1f1ddf1ecc716f91b120d7333011b7485f665a9a412eacb1a2a81"}, + {file = "huggingface_hub-0.27.0.tar.gz", hash = "sha256:902cce1a1be5739f5589e560198a65a8edcfd3b830b1666f36e4b961f0454fac"}, ] [[package]] @@ -2978,7 +2978,7 @@ files = [ [[package]] name = "openai" -version = "1.57.2" +version = "1.58.1" requires_python = ">=3.8" summary = "The official Python library for the openai API" groups = ["hook-check-django-migrations"] @@ -2993,8 +2993,8 @@ dependencies = [ "typing-extensions<5,>=4.11", ] files = [ - {file = "openai-1.57.2-py3-none-any.whl", hash = "sha256:f7326283c156fdee875746e7e54d36959fb198eadc683952ee05e3302fbd638d"}, - {file = "openai-1.57.2.tar.gz", hash = "sha256:5f49fd0f38e9f2131cda7deb45dafdd1aee4f52a637e190ce0ecf40147ce8cee"}, + {file = "openai-1.58.1-py3-none-any.whl", hash = "sha256:e2910b1170a6b7f88ef491ac3a42c387f08bd3db533411f7ee391d166571d63c"}, + {file = "openai-1.58.1.tar.gz", hash = "sha256:f5a035fd01e141fc743f4b0e02c41ca49be8fab0866d3b67f5f29b4f4d3c0973"}, ] [[package]] @@ -3995,16 +3995,16 @@ files = [ [[package]] name = "redis" -version = "5.0.8" -requires_python = ">=3.7" +version = "5.2.1" +requires_python = ">=3.8" summary = "Python client for Redis database and key-value store" groups = ["hook-check-django-migrations"] dependencies = [ "async-timeout>=4.0.3; python_full_version < \"3.11.3\"", ] files = [ - {file = "redis-5.0.8-py3-none-any.whl", hash = "sha256:56134ee08ea909106090934adc36f65c9bcbbaecea5b21ba704ba6fb561f8eb4"}, - {file = "redis-5.0.8.tar.gz", hash = "sha256:0c5b10d387568dfe0698c6fad6615750c24170e548ca2deac10c649d463e9870"}, + {file = "redis-5.2.1-py3-none-any.whl", hash = "sha256:ee7e1056b9aea0f04c6c2ed59452947f34c4940ee025f5dd83e6a6418b6989e4"}, + {file = "redis-5.2.1.tar.gz", hash = "sha256:16f2e22dff21d5125e8481515e386711a34cbec50f0e44413dd7d9c060a54e0f"}, ] [[package]] @@ -5103,7 +5103,7 @@ dependencies = [ "kombu==5.3.7", "llama-index-llms-azure-openai==0.1.10", "llama-index==0.10.58", - "redis~=5.0.1", + "redis~=5.2.1", "requests==2.31.0", ] @@ -5134,7 +5134,7 @@ dependencies = [ [[package]] name = "unstract-sdk" -version = "0.54.0rc6" +version = "0.54.0rc8" requires_python = "<3.11.1,>=3.9" summary = "A framework for writing Unstract Tools/Apps" groups = ["hook-check-django-migrations"] @@ -5168,13 +5168,14 @@ dependencies = [ "pdfplumber>=0.11.2", "python-dotenv==1.0.0", "python-magic~=0.4.27", + "redis>=5.2.1", "singleton-decorator~=1.0.0", "tiktoken~=0.4.0", "transformers==4.37.0", ] files = [ - {file = "unstract_sdk-0.54.0rc6-py3-none-any.whl", hash = "sha256:bb0bbee12a7fb47d53adc14a2d9ddbfcebcf6abed8b0b6deb927f64921c34630"}, - {file = "unstract_sdk-0.54.0rc6.tar.gz", hash = "sha256:410d01a07402fe8b80a1d253daded10512f36c2801e4fc94258b4d2fe9d785fb"}, + {file = "unstract_sdk-0.54.0rc8-py3-none-any.whl", hash = "sha256:c71a4a20c2ae9aac6830297251f74b0e798392ff19fee8481befbf7f4e3b8ba2"}, + {file = "unstract_sdk-0.54.0rc8.tar.gz", hash = "sha256:08a1ec113f96b93c39e0d1b9df39db8eb7957eee1365f5fb5b1432b1b4353a25"}, ] [[package]] @@ -5190,7 +5191,7 @@ dependencies = [ "docker~=6.1.3", "jsonschema~=4.18.2", "unstract-flags", - "unstract-sdk~=0.54.0rc6", + "unstract-sdk~=0.54.0rc8", "unstract-tool-sandbox", ] diff --git a/platform-service/pdm.lock 
b/platform-service/pdm.lock index 35eb3b408..4c712789f 100644 --- a/platform-service/pdm.lock +++ b/platform-service/pdm.lock @@ -5,7 +5,7 @@ groups = ["default", "deploy", "test"] strategy = ["cross_platform", "inherit_metadata"] lock_version = "4.4.2" -content_hash = "sha256:93ad68d0734d1e694986a24f1b41f7d765d29bf8a4667c096d13b96676aba648" +content_hash = "sha256:aede0a8da478846169f4af665bfb9819cc8f12370884a85c3c1484f3e569fd93" [[package]] name = "aiohappyeyeballs" @@ -85,16 +85,16 @@ files = [ [[package]] name = "aiosignal" -version = "1.3.1" -requires_python = ">=3.7" +version = "1.3.2" +requires_python = ">=3.9" summary = "aiosignal: a list of registered asynchronous callbacks" groups = ["default"] dependencies = [ "frozenlist>=1.1.0", ] files = [ - {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, - {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, + {file = "aiosignal-1.3.2-py2.py3-none-any.whl", hash = "sha256:45cde58e409a301715980c2b01d0c28bdde3770d8290b5eb2173759d9acb31a5"}, + {file = "aiosignal-1.3.2.tar.gz", hash = "sha256:a8c255c66fafb1e499c9351d0bf32ff2d8a0321595ebac3b93713656d2436f54"}, ] [[package]] @@ -197,13 +197,13 @@ files = [ [[package]] name = "attrs" -version = "24.2.0" -requires_python = ">=3.7" +version = "24.3.0" +requires_python = ">=3.8" summary = "Classes Without Boilerplate" groups = ["default"] files = [ - {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, - {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, + {file = "attrs-24.3.0-py3-none-any.whl", hash = "sha256:ac96cd038792094f438ad1f6ff80837353805ac950cd2aa0e0625ef19850c308"}, + {file = "attrs-24.3.0.tar.gz", hash = "sha256:8f5c07333d543103541ba7be0e2ce16eeee8130cb0b3f9238ab904ce1e85baff"}, ] [[package]] @@ -281,23 +281,23 @@ files = [ [[package]] name = "boto3" -version = "1.35.79" +version = "1.35.83" requires_python = ">=3.8" summary = "The AWS SDK for Python" groups = ["default"] dependencies = [ - "botocore<1.36.0,>=1.35.79", + "botocore<1.36.0,>=1.35.83", "jmespath<2.0.0,>=0.7.1", "s3transfer<0.11.0,>=0.10.0", ] files = [ - {file = "boto3-1.35.79-py3-none-any.whl", hash = "sha256:a673b0b6378c9ccbf045a31a43195b175e12aa5c37fb7635fcbfc8f48fb857b3"}, - {file = "boto3-1.35.79.tar.gz", hash = "sha256:1fa26217cd33ded82e55aed4460cd55f7223fa647916aa0d3c5d6828e6ec7135"}, + {file = "boto3-1.35.83-py3-none-any.whl", hash = "sha256:a4828d67b12892cb11fe9e6d86f40975a06db470676e61194968e3a32ec4c536"}, + {file = "boto3-1.35.83.tar.gz", hash = "sha256:df2e0d57241de0f9c31b62e73093c2126e4fd73b87b1897ecf280a1b87a2b825"}, ] [[package]] name = "botocore" -version = "1.35.79" +version = "1.35.83" requires_python = ">=3.8" summary = "Low-level, data-driven core of boto 3." 
groups = ["default"] @@ -308,8 +308,8 @@ dependencies = [ "urllib3<1.27,>=1.25.4; python_version < \"3.10\"", ] files = [ - {file = "botocore-1.35.79-py3-none-any.whl", hash = "sha256:e6b10bb9a357e3f5ca2e60f6dd15a85d311b9a476eb21b3c0c2a3b364a2897c8"}, - {file = "botocore-1.35.79.tar.gz", hash = "sha256:245bfdda1b1508539ddd1819c67a8a2cc81780adf0715d3de418d64c4247f346"}, + {file = "botocore-1.35.83-py3-none-any.whl", hash = "sha256:ba363183e4df79fbcfd5f3600fd473bd45a1de03d0d0b5e78abd59f276971d27"}, + {file = "botocore-1.35.83.tar.gz", hash = "sha256:df5e4384838e50bbafd47e9b5fefb995e83cbb9412e7cd7c0db9555174d91bba"}, ] [[package]] @@ -325,13 +325,13 @@ files = [ [[package]] name = "certifi" -version = "2024.8.30" +version = "2024.12.14" requires_python = ">=3.6" summary = "Python package for providing Mozilla's CA Bundle." groups = ["default"] files = [ - {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, - {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, + {file = "certifi-2024.12.14-py3-none-any.whl", hash = "sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56"}, + {file = "certifi-2024.12.14.tar.gz", hash = "sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db"}, ] [[package]] @@ -817,7 +817,7 @@ files = [ [[package]] name = "google-cloud-aiplatform" -version = "1.74.0" +version = "1.75.0" requires_python = ">=3.8" summary = "Vertex AI API client library" groups = ["default"] @@ -835,8 +835,8 @@ dependencies = [ "shapely<3.0.0dev", ] files = [ - {file = "google_cloud_aiplatform-1.74.0-py2.py3-none-any.whl", hash = "sha256:7f37a835e543a4cb4b62505928b983e307c5fee6d949f831cd3804f03c753d87"}, - {file = "google_cloud_aiplatform-1.74.0.tar.gz", hash = "sha256:2202e4e0cbbd2db02835737a1ae9a51ad7bf75c8ed130a3fdbcfced33525e3f0"}, + {file = "google_cloud_aiplatform-1.75.0-py2.py3-none-any.whl", hash = "sha256:eb5d79b5f7210d79a22b53c93a69b5bae5680dfc829387ea020765b97786b3d0"}, + {file = "google_cloud_aiplatform-1.75.0.tar.gz", hash = "sha256:eb8404abf1134b3b368535fe429c4eec2fd12d444c2e9ffbc329ddcbc72b36c9"}, ] [[package]] @@ -876,7 +876,7 @@ files = [ [[package]] name = "google-cloud-resource-manager" -version = "1.13.1" +version = "1.14.0" requires_python = ">=3.7" summary = "Google Cloud Resource Manager API client library" groups = ["default"] @@ -888,8 +888,8 @@ dependencies = [ "protobuf!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<6.0.0dev,>=3.20.2", ] files = [ - {file = "google_cloud_resource_manager-1.13.1-py2.py3-none-any.whl", hash = "sha256:abdc7d443ab6c0763b8ed49ab59203e223f14c683df69e3748d5eb2237475f5f"}, - {file = "google_cloud_resource_manager-1.13.1.tar.gz", hash = "sha256:bee9f2fb1d856731182b7cc05980d216aae848947ccdadf2848a2c64ccd6bbea"}, + {file = "google_cloud_resource_manager-1.14.0-py2.py3-none-any.whl", hash = "sha256:4860c3ea9ace760b317ea90d4e27f1b32e54ededdcc340a7cb70c8ef238d8f7c"}, + {file = "google_cloud_resource_manager-1.14.0.tar.gz", hash = "sha256:daa70a3a4704759d31f812ed221e3b6f7b660af30c7862e4a0060ea91291db30"}, ] [[package]] @@ -1271,7 +1271,7 @@ files = [ [[package]] name = "huggingface-hub" -version = "0.26.5" +version = "0.27.0" requires_python = ">=3.8.0" summary = "Client library to download and publish models, datasets and other repos on the huggingface.co hub" groups = ["default"] @@ -1285,8 +1285,8 @@ dependencies = [ "typing-extensions>=3.7.4.3", ] files = [ - {file 
= "huggingface_hub-0.26.5-py3-none-any.whl", hash = "sha256:fb7386090bbe892072e64b85f7c4479fd2d65eea5f2543327c970d5169e83924"}, - {file = "huggingface_hub-0.26.5.tar.gz", hash = "sha256:1008bd18f60bfb65e8dbc0a97249beeeaa8c99d3c2fa649354df9fa5a13ed83b"}, + {file = "huggingface_hub-0.27.0-py3-none-any.whl", hash = "sha256:8f2e834517f1f1ddf1ecc716f91b120d7333011b7485f665a9a412eacb1a2a81"}, + {file = "huggingface_hub-0.27.0.tar.gz", hash = "sha256:902cce1a1be5739f5589e560198a65a8edcfd3b830b1666f36e4b961f0454fac"}, ] [[package]] @@ -2289,7 +2289,7 @@ files = [ [[package]] name = "openai" -version = "1.57.2" +version = "1.58.1" requires_python = ">=3.8" summary = "The official Python library for the openai API" groups = ["default"] @@ -2304,8 +2304,8 @@ dependencies = [ "typing-extensions<5,>=4.11", ] files = [ - {file = "openai-1.57.2-py3-none-any.whl", hash = "sha256:f7326283c156fdee875746e7e54d36959fb198eadc683952ee05e3302fbd638d"}, - {file = "openai-1.57.2.tar.gz", hash = "sha256:5f49fd0f38e9f2131cda7deb45dafdd1aee4f52a637e190ce0ecf40147ce8cee"}, + {file = "openai-1.58.1-py3-none-any.whl", hash = "sha256:e2910b1170a6b7f88ef491ac3a42c387f08bd3db533411f7ee391d166571d63c"}, + {file = "openai-1.58.1.tar.gz", hash = "sha256:f5a035fd01e141fc743f4b0e02c41ca49be8fab0866d3b67f5f29b4f4d3c0973"}, ] [[package]] @@ -3073,16 +3073,16 @@ files = [ [[package]] name = "redis" -version = "5.0.8" -requires_python = ">=3.7" +version = "5.2.1" +requires_python = ">=3.8" summary = "Python client for Redis database and key-value store" groups = ["default"] dependencies = [ "async-timeout>=4.0.3; python_full_version < \"3.11.3\"", ] files = [ - {file = "redis-5.0.8-py3-none-any.whl", hash = "sha256:56134ee08ea909106090934adc36f65c9bcbbaecea5b21ba704ba6fb561f8eb4"}, - {file = "redis-5.0.8.tar.gz", hash = "sha256:0c5b10d387568dfe0698c6fad6615750c24170e548ca2deac10c649d463e9870"}, + {file = "redis-5.2.1-py3-none-any.whl", hash = "sha256:ee7e1056b9aea0f04c6c2ed59452947f34c4940ee025f5dd83e6a6418b6989e4"}, + {file = "redis-5.2.1.tar.gz", hash = "sha256:16f2e22dff21d5125e8481515e386711a34cbec50f0e44413dd7d9c060a54e0f"}, ] [[package]] @@ -3824,7 +3824,7 @@ dependencies = [ [[package]] name = "unstract-sdk" -version = "0.54.0rc6" +version = "0.54.0rc8" requires_python = "<3.11.1,>=3.9" summary = "A framework for writing Unstract Tools/Apps" groups = ["default"] @@ -3858,13 +3858,14 @@ dependencies = [ "pdfplumber>=0.11.2", "python-dotenv==1.0.0", "python-magic~=0.4.27", + "redis>=5.2.1", "singleton-decorator~=1.0.0", "tiktoken~=0.4.0", "transformers==4.37.0", ] files = [ - {file = "unstract_sdk-0.54.0rc6-py3-none-any.whl", hash = "sha256:bb0bbee12a7fb47d53adc14a2d9ddbfcebcf6abed8b0b6deb927f64921c34630"}, - {file = "unstract_sdk-0.54.0rc6.tar.gz", hash = "sha256:410d01a07402fe8b80a1d253daded10512f36c2801e4fc94258b4d2fe9d785fb"}, + {file = "unstract_sdk-0.54.0rc8-py3-none-any.whl", hash = "sha256:c71a4a20c2ae9aac6830297251f74b0e798392ff19fee8481befbf7f4e3b8ba2"}, + {file = "unstract_sdk-0.54.0rc8.tar.gz", hash = "sha256:08a1ec113f96b93c39e0d1b9df39db8eb7957eee1365f5fb5b1432b1b4353a25"}, ] [[package]] diff --git a/platform-service/pyproject.toml b/platform-service/pyproject.toml index 0a75f83ca..2b37df2fc 100644 --- a/platform-service/pyproject.toml +++ b/platform-service/pyproject.toml @@ -10,10 +10,10 @@ dependencies = [ "peewee~=3.16", "psycopg2-binary~=2.9", "python-dotenv~=1.0", - "redis~=5.0.1", + "redis~=5.2.1", "cryptography>=41.0.7", "requests>=2.31.0", - "unstract-sdk==0.54.0rc6", + "unstract-sdk==0.54.0rc8", 
"gcsfs==2024.10.0", "unstract-flags @ file:///${PROJECT_ROOT}/../unstract/flags", ] diff --git a/prompt-service/pdm.lock b/prompt-service/pdm.lock index c3952b00e..e47cffc8a 100644 --- a/prompt-service/pdm.lock +++ b/prompt-service/pdm.lock @@ -5,7 +5,7 @@ groups = ["default", "deploy"] strategy = ["cross_platform", "inherit_metadata"] lock_version = "4.4.2" -content_hash = "sha256:8c54898df048878a3ff4a51af551372bb13580f6e3be98e2db3de232a4bb5a88" +content_hash = "sha256:52f8a57159f764635a4687e7bbc24f8c8fb7baa880e4f88195ffd8a0c1709619" [[package]] name = "aiohappyeyeballs" @@ -85,16 +85,16 @@ files = [ [[package]] name = "aiosignal" -version = "1.3.1" -requires_python = ">=3.7" +version = "1.3.2" +requires_python = ">=3.9" summary = "aiosignal: a list of registered asynchronous callbacks" groups = ["default"] dependencies = [ "frozenlist>=1.1.0", ] files = [ - {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, - {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, + {file = "aiosignal-1.3.2-py2.py3-none-any.whl", hash = "sha256:45cde58e409a301715980c2b01d0c28bdde3770d8290b5eb2173759d9acb31a5"}, + {file = "aiosignal-1.3.2.tar.gz", hash = "sha256:a8c255c66fafb1e499c9351d0bf32ff2d8a0321595ebac3b93713656d2436f54"}, ] [[package]] @@ -211,13 +211,13 @@ files = [ [[package]] name = "attrs" -version = "24.2.0" -requires_python = ">=3.7" +version = "24.3.0" +requires_python = ">=3.8" summary = "Classes Without Boilerplate" groups = ["default"] files = [ - {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, - {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, + {file = "attrs-24.3.0-py3-none-any.whl", hash = "sha256:ac96cd038792094f438ad1f6ff80837353805ac950cd2aa0e0625ef19850c308"}, + {file = "attrs-24.3.0.tar.gz", hash = "sha256:8f5c07333d543103541ba7be0e2ce16eeee8130cb0b3f9238ab904ce1e85baff"}, ] [[package]] @@ -339,13 +339,13 @@ files = [ [[package]] name = "certifi" -version = "2024.8.30" +version = "2024.12.14" requires_python = ">=3.6" summary = "Python package for providing Mozilla's CA Bundle." 
groups = ["default"] files = [ - {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, - {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, + {file = "certifi-2024.12.14-py3-none-any.whl", hash = "sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56"}, + {file = "certifi-2024.12.14.tar.gz", hash = "sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db"}, ] [[package]] @@ -785,7 +785,7 @@ files = [ [[package]] name = "google-cloud-aiplatform" -version = "1.74.0" +version = "1.75.0" requires_python = ">=3.8" summary = "Vertex AI API client library" groups = ["default"] @@ -803,8 +803,8 @@ dependencies = [ "shapely<3.0.0dev", ] files = [ - {file = "google_cloud_aiplatform-1.74.0-py2.py3-none-any.whl", hash = "sha256:7f37a835e543a4cb4b62505928b983e307c5fee6d949f831cd3804f03c753d87"}, - {file = "google_cloud_aiplatform-1.74.0.tar.gz", hash = "sha256:2202e4e0cbbd2db02835737a1ae9a51ad7bf75c8ed130a3fdbcfced33525e3f0"}, + {file = "google_cloud_aiplatform-1.75.0-py2.py3-none-any.whl", hash = "sha256:eb5d79b5f7210d79a22b53c93a69b5bae5680dfc829387ea020765b97786b3d0"}, + {file = "google_cloud_aiplatform-1.75.0.tar.gz", hash = "sha256:eb8404abf1134b3b368535fe429c4eec2fd12d444c2e9ffbc329ddcbc72b36c9"}, ] [[package]] @@ -844,7 +844,7 @@ files = [ [[package]] name = "google-cloud-resource-manager" -version = "1.13.1" +version = "1.14.0" requires_python = ">=3.7" summary = "Google Cloud Resource Manager API client library" groups = ["default"] @@ -856,8 +856,8 @@ dependencies = [ "protobuf!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<6.0.0dev,>=3.20.2", ] files = [ - {file = "google_cloud_resource_manager-1.13.1-py2.py3-none-any.whl", hash = "sha256:abdc7d443ab6c0763b8ed49ab59203e223f14c683df69e3748d5eb2237475f5f"}, - {file = "google_cloud_resource_manager-1.13.1.tar.gz", hash = "sha256:bee9f2fb1d856731182b7cc05980d216aae848947ccdadf2848a2c64ccd6bbea"}, + {file = "google_cloud_resource_manager-1.14.0-py2.py3-none-any.whl", hash = "sha256:4860c3ea9ace760b317ea90d4e27f1b32e54ededdcc340a7cb70c8ef238d8f7c"}, + {file = "google_cloud_resource_manager-1.14.0.tar.gz", hash = "sha256:daa70a3a4704759d31f812ed221e3b6f7b660af30c7862e4a0060ea91291db30"}, ] [[package]] @@ -1239,7 +1239,7 @@ files = [ [[package]] name = "huggingface-hub" -version = "0.26.5" +version = "0.27.0" requires_python = ">=3.8.0" summary = "Client library to download and publish models, datasets and other repos on the huggingface.co hub" groups = ["default"] @@ -1253,8 +1253,8 @@ dependencies = [ "typing-extensions>=3.7.4.3", ] files = [ - {file = "huggingface_hub-0.26.5-py3-none-any.whl", hash = "sha256:fb7386090bbe892072e64b85f7c4479fd2d65eea5f2543327c970d5169e83924"}, - {file = "huggingface_hub-0.26.5.tar.gz", hash = "sha256:1008bd18f60bfb65e8dbc0a97249beeeaa8c99d3c2fa649354df9fa5a13ed83b"}, + {file = "huggingface_hub-0.27.0-py3-none-any.whl", hash = "sha256:8f2e834517f1f1ddf1ecc716f91b120d7333011b7485f665a9a412eacb1a2a81"}, + {file = "huggingface_hub-0.27.0.tar.gz", hash = "sha256:902cce1a1be5739f5589e560198a65a8edcfd3b830b1666f36e4b961f0454fac"}, ] [[package]] @@ -2251,7 +2251,7 @@ files = [ [[package]] name = "openai" -version = "1.57.2" +version = "1.58.1" requires_python = ">=3.8" summary = "The official Python library for the openai API" groups = ["default"] @@ -2266,8 +2266,8 @@ dependencies = [ "typing-extensions<5,>=4.11", ] files = [ - {file = 
"openai-1.57.2-py3-none-any.whl", hash = "sha256:f7326283c156fdee875746e7e54d36959fb198eadc683952ee05e3302fbd638d"}, - {file = "openai-1.57.2.tar.gz", hash = "sha256:5f49fd0f38e9f2131cda7deb45dafdd1aee4f52a637e190ce0ecf40147ce8cee"}, + {file = "openai-1.58.1-py3-none-any.whl", hash = "sha256:e2910b1170a6b7f88ef491ac3a42c387f08bd3db533411f7ee391d166571d63c"}, + {file = "openai-1.58.1.tar.gz", hash = "sha256:f5a035fd01e141fc743f4b0e02c41ca49be8fab0866d3b67f5f29b4f4d3c0973"}, ] [[package]] @@ -3005,16 +3005,16 @@ files = [ [[package]] name = "redis" -version = "5.0.8" -requires_python = ">=3.7" +version = "5.2.1" +requires_python = ">=3.8" summary = "Python client for Redis database and key-value store" groups = ["default"] dependencies = [ "async-timeout>=4.0.3; python_full_version < \"3.11.3\"", ] files = [ - {file = "redis-5.0.8-py3-none-any.whl", hash = "sha256:56134ee08ea909106090934adc36f65c9bcbbaecea5b21ba704ba6fb561f8eb4"}, - {file = "redis-5.0.8.tar.gz", hash = "sha256:0c5b10d387568dfe0698c6fad6615750c24170e548ca2deac10c649d463e9870"}, + {file = "redis-5.2.1-py3-none-any.whl", hash = "sha256:ee7e1056b9aea0f04c6c2ed59452947f34c4940ee025f5dd83e6a6418b6989e4"}, + {file = "redis-5.2.1.tar.gz", hash = "sha256:16f2e22dff21d5125e8481515e386711a34cbec50f0e44413dd7d9c060a54e0f"}, ] [[package]] @@ -3718,7 +3718,7 @@ dependencies = [ "kombu==5.3.7", "llama-index-llms-azure-openai==0.1.10", "llama-index==0.10.58", - "redis~=5.0.1", + "redis~=5.2.1", "requests==2.31.0", ] @@ -3736,7 +3736,7 @@ dependencies = [ [[package]] name = "unstract-sdk" -version = "0.54.0rc6" +version = "0.54.0rc8" requires_python = "<3.11.1,>=3.9" summary = "A framework for writing Unstract Tools/Apps" groups = ["default"] @@ -3770,13 +3770,14 @@ dependencies = [ "pdfplumber>=0.11.2", "python-dotenv==1.0.0", "python-magic~=0.4.27", + "redis>=5.2.1", "singleton-decorator~=1.0.0", "tiktoken~=0.4.0", "transformers==4.37.0", ] files = [ - {file = "unstract_sdk-0.54.0rc6-py3-none-any.whl", hash = "sha256:bb0bbee12a7fb47d53adc14a2d9ddbfcebcf6abed8b0b6deb927f64921c34630"}, - {file = "unstract_sdk-0.54.0rc6.tar.gz", hash = "sha256:410d01a07402fe8b80a1d253daded10512f36c2801e4fc94258b4d2fe9d785fb"}, + {file = "unstract_sdk-0.54.0rc8-py3-none-any.whl", hash = "sha256:c71a4a20c2ae9aac6830297251f74b0e798392ff19fee8481befbf7f4e3b8ba2"}, + {file = "unstract_sdk-0.54.0rc8.tar.gz", hash = "sha256:08a1ec113f96b93c39e0d1b9df39db8eb7957eee1365f5fb5b1432b1b4353a25"}, ] [[package]] diff --git a/prompt-service/pyproject.toml b/prompt-service/pyproject.toml index 17232e867..b8da97c18 100644 --- a/prompt-service/pyproject.toml +++ b/prompt-service/pyproject.toml @@ -15,7 +15,7 @@ dependencies = [ "flask~=3.0", "llama-index==0.10.58", "python-dotenv==1.0.0", - "unstract-sdk~=0.54.0rc6", + "unstract-sdk~=0.54.0rc8", "redis>=5.0.3", "unstract-core @ file:///${PROJECT_ROOT}/../unstract/core", "unstract-flags @ file:///${PROJECT_ROOT}/../unstract/flags", diff --git a/prompt-service/src/unstract/prompt_service/constants.py b/prompt-service/src/unstract/prompt_service/constants.py index 8da5405b7..4a64bfb96 100644 --- a/prompt-service/src/unstract/prompt_service/constants.py +++ b/prompt-service/src/unstract/prompt_service/constants.py @@ -74,7 +74,8 @@ class PromptServiceContants: CONFIDENCE_DATA = "confidence_data" REQUIRED_FIELDS = "required_fields" REQUIRED = "required" - + EXECUTION_SOURCE = "execution_source" + METRICS = "metrics" class RunLevel(Enum): """Different stages of prompt execution. 
@@ -102,3 +103,20 @@ class DBTableV2: PROMPT_STUDIO_REGISTRY = "prompt_studio_registry" PLATFORM_KEY = "platform_key" TOKEN_USAGE = "usage" + + +class FileStorageKeys: + FILE_STORAGE_PROVIDER = "FILE_STORAGE_PROVIDER" + FILE_STORAGE_CREDENTIALS = "FILE_STORAGE_CREDENTIALS" + PERMANENT_REMOTE_STORAGE = "PERMANENT_REMOTE_STORAGE" + TEMPORARY_REMOTE_STORAGE = "TEMPORARY_REMOTE_STORAGE" + + +class FileStorageType(Enum): + PERMANENT = "permanent" + TEMPORARY = "temporary" + + +class ExecutionSource(Enum): + IDE = "ide" + TOOL = "tool" diff --git a/prompt-service/src/unstract/prompt_service/helper.py b/prompt-service/src/unstract/prompt_service/helper.py index 64307af41..999a5744e 100644 --- a/prompt-service/src/unstract/prompt_service/helper.py +++ b/prompt-service/src/unstract/prompt_service/helper.py @@ -7,7 +7,12 @@ from dotenv import load_dotenv from flask import Flask, current_app from unstract.prompt_service.config import db -from unstract.prompt_service.constants import DBTableV2 +from unstract.prompt_service.constants import ( + DBTableV2, + ExecutionSource, + FeatureFlag, + FileStorageKeys, +) from unstract.prompt_service.constants import PromptServiceContants as PSKeys from unstract.prompt_service.db_utils import DBUtils from unstract.prompt_service.env_manager import EnvLoader @@ -16,6 +21,13 @@ from unstract.sdk.exceptions import SdkError from unstract.sdk.llm import LLM +from unstract.flags.feature_flag import check_feature_flag_status + +if check_feature_flag_status(FeatureFlag.REMOTE_FILE_STORAGE): + from unstract.sdk.file_storage import FileStorage, FileStorageProvider + from unstract.sdk.file_storage.constants import StorageType + from unstract.sdk.file_storage.env_helper import EnvHelper + load_dotenv() # Global variable to store plugins @@ -278,6 +290,7 @@ def run_completion( prompt_type: Optional[str] = PSKeys.TEXT, enable_highlight: bool = False, file_path: str = "", + execution_source: Optional[str] = None, ) -> str: logger: Logger = current_app.logger try: @@ -286,9 +299,27 @@ def run_completion( ) highlight_data = None if highlight_data_plugin and enable_highlight: - highlight_data = highlight_data_plugin["entrypoint_cls"]( - logger=current_app.logger, file_path=file_path - ).run + if check_feature_flag_status(FeatureFlag.REMOTE_FILE_STORAGE): + fs_instance: FileStorage = FileStorage(FileStorageProvider.LOCAL) + if execution_source == ExecutionSource.IDE.value: + fs_instance = EnvHelper.get_storage( + storage_type=StorageType.PERMANENT, + env_name=FileStorageKeys.PERMANENT_REMOTE_STORAGE, + ) + if execution_source == ExecutionSource.TOOL.value: + fs_instance = EnvHelper.get_storage( + storage_type=StorageType.TEMPORARY, + env_name=FileStorageKeys.TEMPORARY_REMOTE_STORAGE, + ) + highlight_data = highlight_data_plugin["entrypoint_cls"]( + logger=current_app.logger, + file_path=file_path, + fs_instance=fs_instance, + ).run + else: + highlight_data = highlight_data_plugin["entrypoint_cls"]( + logger=current_app.logger, file_path=file_path + ).run completion = llm.complete( prompt=prompt, process_text=highlight_data, @@ -323,6 +354,7 @@ def extract_table( structured_output: dict[str, Any], llm: LLM, enforce_type: str, + execution_source: str, ) -> dict[str, Any]: table_settings = output[PSKeys.TABLE_SETTINGS] table_extractor: dict[str, Any] = plugins.get("table-extractor", {}) @@ -331,10 +363,32 @@ def extract_table( "Unable to extract table details. " "Please contact admin to resolve this issue." 
) + if check_feature_flag_status(FeatureFlag.REMOTE_FILE_STORAGE): + fs_instance: FileStorage = FileStorage(FileStorageProvider.LOCAL) + if execution_source == ExecutionSource.IDE.value: + fs_instance = EnvHelper.get_storage( + storage_type=StorageType.PERMANENT, + env_name=FileStorageKeys.PERMANENT_REMOTE_STORAGE, + ) + if execution_source == ExecutionSource.TOOL.value: + fs_instance = EnvHelper.get_storage( + storage_type=StorageType.TEMPORARY, + env_name=FileStorageKeys.TEMPORARY_REMOTE_STORAGE, + ) try: - answer = table_extractor["entrypoint_cls"].extract_large_table( - llm=llm, table_settings=table_settings, enforce_type=enforce_type - ) + if check_feature_flag_status(FeatureFlag.REMOTE_FILE_STORAGE): + answer = table_extractor["entrypoint_cls"].extract_large_table( + llm=llm, + table_settings=table_settings, + enforce_type=enforce_type, + fs_instance=fs_instance, + ) + else: + answer = table_extractor["entrypoint_cls"].extract_large_table( + llm=llm, + table_settings=table_settings, + enforce_type=enforce_type, + ) structured_output[output[PSKeys.NAME]] = answer # We do not support summary and eval for table. # Hence returning the result diff --git a/prompt-service/src/unstract/prompt_service/main.py b/prompt-service/src/unstract/prompt_service/main.py index c2a67ab5b..25a3776ad 100644 --- a/prompt-service/src/unstract/prompt_service/main.py +++ b/prompt-service/src/unstract/prompt_service/main.py @@ -111,7 +111,10 @@ def prompt_processor() -> Any: PSKeys.CONTEXT: {}, PSKeys.REQUIRED_FIELDS: {}, } + metrics: dict = {} variable_names: list[str] = [] + # Identifier for source of invocation + execution_source = payload.get(PSKeys.EXECUTION_SOURCE, "") publish_log( log_events_id, {"tool_id": tool_id, "run_id": run_id, "doc_name": doc_name}, @@ -132,8 +135,7 @@ def prompt_processor() -> Any: prompt_text = output[PSKeys.PROMPT] chunk_size = output[PSKeys.CHUNK_SIZE] util = PromptServiceBaseTool(platform_key=platform_key) - index = Index(tool=util) - + index = Index(tool=util, run_id=run_id, capture_metrics=True) if VariableExtractor.is_variables_present(prompt_text=prompt_text): prompt_text = VariableExtractor.replace_variables_in_prompt( prompt=output, @@ -194,6 +196,7 @@ def prompt_processor() -> Any: **usage_kwargs, PSKeys.LLM_USAGE_REASON: PSKeys.EXTRACTION, }, + capture_metrics=True, ) embedding = Embedding( @@ -231,6 +234,7 @@ def prompt_processor() -> Any: structured_output=structured_output, llm=llm, enforce_type=output[PSKeys.TYPE], + execution_source=execution_source, ) metadata = query_usage_metadata(token=platform_key, metadata=metadata) response = { @@ -495,6 +499,7 @@ def prompt_processor() -> Any: **usage_kwargs, PSKeys.LLM_USAGE_REASON: PSKeys.CHALLENGE, }, + capture_metrics=True, ) challenge = challenge_plugin["entrypoint_cls"]( llm=llm, @@ -598,6 +603,18 @@ def prompt_processor() -> Any: f"No eval plugin found to evaluate prompt: {output[PSKeys.NAME]}" # noqa: E501 ) finally: + challenge_metrics = ( + {f"{challenge_llm.get_usage_reason()}_llm": challenge_llm.get_metrics()} + if enable_challenge + else {} + ) + metrics.setdefault(prompt_name, {}).update( + { + "context_retrieval": index.get_metrics(), + f"{llm.get_usage_reason()}_llm": llm.get_metrics(), + **challenge_metrics, + } + ) vector_db.close() publish_log( log_events_id, @@ -630,7 +647,11 @@ def prompt_processor() -> Any: "Execution complete", ) metadata = query_usage_metadata(token=platform_key, metadata=metadata) - response = {PSKeys.METADATA: metadata, PSKeys.OUTPUT: structured_output} + response = { + 
PSKeys.METADATA: metadata, + PSKeys.OUTPUT: structured_output, + PSKeys.METRICS: metrics, + } return response diff --git a/pyproject.toml b/pyproject.toml index 16f1bccf1..189fdf0af 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -41,7 +41,7 @@ hook-check-django-migrations = [ "psycopg2-binary==2.9.9", "python-dotenv==1.0.0", "python-magic==0.4.27", - "unstract-sdk~=0.54.0rc6", + "unstract-sdk~=0.54.0rc8", "-e unstract-connectors @ file:///${PROJECT_ROOT}/unstract/connectors", "-e unstract-core @ file:///${PROJECT_ROOT}/unstract/core", "-e unstract-flags @ file:///${PROJECT_ROOT}/unstract/flags", diff --git a/tools/structure/requirements.txt b/tools/structure/requirements.txt index a5f4b5a83..8c8cfaf23 100644 --- a/tools/structure/requirements.txt +++ b/tools/structure/requirements.txt @@ -1,6 +1,6 @@ # Add your dependencies here # Required for all unstract tools -unstract-sdk~=0.54.0rc6 +unstract-sdk~=0.54.0rc8 # Required for remote storage support s3fs[boto3]==2024.6.0 diff --git a/tools/structure/src/config/properties.json b/tools/structure/src/config/properties.json index 46e024e92..9d41bf2f7 100644 --- a/tools/structure/src/config/properties.json +++ b/tools/structure/src/config/properties.json @@ -2,7 +2,7 @@ "schemaVersion": "0.0.1", "displayName": "Structure Tool", "functionName": "structure_tool", - "toolVersion": "0.0.52", + "toolVersion": "0.0.53", "description": "This is a template tool which can answer set of input prompts designed in the Prompt Studio", "input": { "description": "File that needs to be indexed and parsed for answers" diff --git a/tools/structure/src/constants.py b/tools/structure/src/constants.py index 8cf7c8653..8f77d9ed3 100644 --- a/tools/structure/src/constants.py +++ b/tools/structure/src/constants.py @@ -75,3 +75,7 @@ class SettingsKeys: CONFIDENCE_DATA = "confidence_data" EXECUTION_RUN_DATA_FOLDER = "EXECUTION_RUN_DATA_FOLDER" FILE_PATH = "file_path" + EXECUTION_SOURCE = "execution_source" + TOOL = "tool" + METRICS = "metrics" + INDEXING = "indexing" diff --git a/tools/structure/src/main.py b/tools/structure/src/main.py index a1736c554..e9c671ea1 100644 --- a/tools/structure/src/main.py +++ b/tools/structure/src/main.py @@ -82,7 +82,6 @@ def run( self.stream_update(output_log, state=LogState.OUTPUT_UPDATE) file_hash = self.get_exec_metadata.get(MetadataKey.SOURCE_HASH) - index = Index(tool=self) tool_id = tool_metadata[SettingsKeys.TOOL_ID] tool_settings = tool_metadata[SettingsKeys.TOOL_SETTINGS] outputs = tool_metadata[SettingsKeys.OUTPUTS] @@ -105,6 +104,12 @@ def run( self.get_env_or_die(SettingsKeys.EXECUTION_RUN_DATA_FOLDER) ) run_id = CommonUtils.generate_uuid() + index = Index( + tool=self, + run_id=run_id, + capture_metrics=True, + ) + index_metrics = {} extracted_input_file = str(execution_run_data_folder / SettingsKeys.EXTRACT) # TODO : Resolve and pass log events ID payload = { @@ -115,6 +120,7 @@ def run( SettingsKeys.FILE_HASH: file_hash, SettingsKeys.FILE_NAME: file_name, SettingsKeys.FILE_PATH: extracted_input_file, + SettingsKeys.EXECUTION_SOURCE: SettingsKeys.TOOL, } # TODO: Need to split extraction and indexing # to avoid unwanted indexing @@ -151,6 +157,7 @@ def run( else {} ), ) + index_metrics = {SettingsKeys.INDEXING: index.get_metrics()} if summarize_as_source: summarize_file_hash = self._summarize_and_index( tool_id=tool_id, @@ -187,6 +194,10 @@ def run( usage_kwargs=usage_kwargs, process_text=process_text, ) + index_metrics[output[SettingsKeys.NAME]] = { + SettingsKeys.INDEXING: index.get_metrics() + } + 
index.clear_metrics() if summarize_as_source: summarize_file_hash = self._summarize_and_index( @@ -242,6 +253,22 @@ def run( structured_output_dict[SettingsKeys.METADATA] = metadata structured_output = json.dumps(structured_output_dict) + metrics = structured_output_dict.get(SettingsKeys.METRICS, {}) + new_metrics = {} + if tool_settings[SettingsKeys.ENABLE_SINGLE_PASS_EXTRACTION]: + new_metrics = { + **metrics, + **index_metrics, + } + else: + # Merge dictionaries + new_metrics = { + key: {**metrics.get(key, {}), **index_metrics.get(key, {})} + for key in set(metrics) + | set(index_metrics) # Union of keys from both dictionaries + } + if new_metrics: + structured_output_dict[SettingsKeys.METRICS] = new_metrics # Update GUI output_log = ( f"## Result\n**NOTE:** In case of a deployed pipeline, the result would " diff --git a/unstract/core/pdm.lock b/unstract/core/pdm.lock index ebc019f79..45013fbf1 100644 --- a/unstract/core/pdm.lock +++ b/unstract/core/pdm.lock @@ -5,100 +5,101 @@ groups = ["default"] strategy = ["cross_platform", "inherit_metadata"] lock_version = "4.4.2" -content_hash = "sha256:13e7cb5563015e24dc4426a6769327dd96ba76ea383369cf4eb7e04289f70ce2" +content_hash = "sha256:e71de2d2c54ff0be034448ecaf502b179a921cfcde1f445c1a2773a625c1bf6d" [[package]] name = "aiohappyeyeballs" -version = "2.3.5" +version = "2.4.4" requires_python = ">=3.8" summary = "Happy Eyeballs for asyncio" groups = ["default"] files = [ - {file = "aiohappyeyeballs-2.3.5-py3-none-any.whl", hash = "sha256:4d6dea59215537dbc746e93e779caea8178c866856a721c9c660d7a5a7b8be03"}, - {file = "aiohappyeyeballs-2.3.5.tar.gz", hash = "sha256:6fa48b9f1317254f122a07a131a86b71ca6946ca989ce6326fff54a99a920105"}, + {file = "aiohappyeyeballs-2.4.4-py3-none-any.whl", hash = "sha256:a980909d50efcd44795c4afeca523296716d50cd756ddca6af8c65b996e27de8"}, + {file = "aiohappyeyeballs-2.4.4.tar.gz", hash = "sha256:5fdd7d87889c63183afc18ce9271f9b0a7d32c2303e394468dd45d514a757745"}, ] [[package]] name = "aiohttp" -version = "3.10.3" -requires_python = ">=3.8" +version = "3.11.10" +requires_python = ">=3.9" summary = "Async http client/server framework (asyncio)" groups = ["default"] dependencies = [ "aiohappyeyeballs>=2.3.0", "aiosignal>=1.1.2", - "async-timeout<5.0,>=4.0; python_version < \"3.11\"", + "async-timeout<6.0,>=4.0; python_version < \"3.11\"", "attrs>=17.3.0", "frozenlist>=1.1.1", "multidict<7.0,>=4.5", - "yarl<2.0,>=1.0", -] -files = [ - {file = "aiohttp-3.10.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cc36cbdedf6f259371dbbbcaae5bb0e95b879bc501668ab6306af867577eb5db"}, - {file = "aiohttp-3.10.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:85466b5a695c2a7db13eb2c200af552d13e6a9313d7fa92e4ffe04a2c0ea74c1"}, - {file = "aiohttp-3.10.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:71bb1d97bfe7e6726267cea169fdf5df7658831bb68ec02c9c6b9f3511e108bb"}, - {file = "aiohttp-3.10.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:baec1eb274f78b2de54471fc4c69ecbea4275965eab4b556ef7a7698dee18bf2"}, - {file = "aiohttp-3.10.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:13031e7ec1188274bad243255c328cc3019e36a5a907978501256000d57a7201"}, - {file = "aiohttp-3.10.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2bbc55a964b8eecb341e492ae91c3bd0848324d313e1e71a27e3d96e6ee7e8e8"}, - {file = "aiohttp-3.10.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:e8cc0564b286b625e673a2615ede60a1704d0cbbf1b24604e28c31ed37dc62aa"}, - {file = "aiohttp-3.10.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f817a54059a4cfbc385a7f51696359c642088710e731e8df80d0607193ed2b73"}, - {file = "aiohttp-3.10.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:8542c9e5bcb2bd3115acdf5adc41cda394e7360916197805e7e32b93d821ef93"}, - {file = "aiohttp-3.10.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:671efce3a4a0281060edf9a07a2f7e6230dca3a1cbc61d110eee7753d28405f7"}, - {file = "aiohttp-3.10.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:0974f3b5b0132edcec92c3306f858ad4356a63d26b18021d859c9927616ebf27"}, - {file = "aiohttp-3.10.3-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:44bb159b55926b57812dca1b21c34528e800963ffe130d08b049b2d6b994ada7"}, - {file = "aiohttp-3.10.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:6ae9ae382d1c9617a91647575255ad55a48bfdde34cc2185dd558ce476bf16e9"}, - {file = "aiohttp-3.10.3-cp310-cp310-win32.whl", hash = "sha256:aed12a54d4e1ee647376fa541e1b7621505001f9f939debf51397b9329fd88b9"}, - {file = "aiohttp-3.10.3-cp310-cp310-win_amd64.whl", hash = "sha256:b51aef59370baf7444de1572f7830f59ddbabd04e5292fa4218d02f085f8d299"}, - {file = "aiohttp-3.10.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e021c4c778644e8cdc09487d65564265e6b149896a17d7c0f52e9a088cc44e1b"}, - {file = "aiohttp-3.10.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:24fade6dae446b183e2410a8628b80df9b7a42205c6bfc2eff783cbeedc224a2"}, - {file = "aiohttp-3.10.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bc8e9f15939dacb0e1f2d15f9c41b786051c10472c7a926f5771e99b49a5957f"}, - {file = "aiohttp-3.10.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5a9ec959b5381271c8ec9310aae1713b2aec29efa32e232e5ef7dcca0df0279"}, - {file = "aiohttp-3.10.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2a5d0ea8a6467b15d53b00c4e8ea8811e47c3cc1bdbc62b1aceb3076403d551f"}, - {file = "aiohttp-3.10.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c9ed607dbbdd0d4d39b597e5bf6b0d40d844dfb0ac6a123ed79042ef08c1f87e"}, - {file = "aiohttp-3.10.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3e66d5b506832e56add66af88c288c1d5ba0c38b535a1a59e436b300b57b23e"}, - {file = "aiohttp-3.10.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fda91ad797e4914cca0afa8b6cccd5d2b3569ccc88731be202f6adce39503189"}, - {file = "aiohttp-3.10.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:61ccb867b2f2f53df6598eb2a93329b5eee0b00646ee79ea67d68844747a418e"}, - {file = "aiohttp-3.10.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:6d881353264e6156f215b3cb778c9ac3184f5465c2ece5e6fce82e68946868ef"}, - {file = "aiohttp-3.10.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:b031ce229114825f49cec4434fa844ccb5225e266c3e146cb4bdd025a6da52f1"}, - {file = "aiohttp-3.10.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:5337cc742a03f9e3213b097abff8781f79de7190bbfaa987bd2b7ceb5bb0bdec"}, - {file = "aiohttp-3.10.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ab3361159fd3dcd0e48bbe804006d5cfb074b382666e6c064112056eb234f1a9"}, - {file = "aiohttp-3.10.3-cp311-cp311-win32.whl", hash = "sha256:05d66203a530209cbe40f102ebaac0b2214aba2a33c075d0bf825987c36f1f0b"}, - {file = "aiohttp-3.10.3-cp311-cp311-win_amd64.whl", hash = 
"sha256:70b4a4984a70a2322b70e088d654528129783ac1ebbf7dd76627b3bd22db2f17"}, - {file = "aiohttp-3.10.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:38d91b98b4320ffe66efa56cb0f614a05af53b675ce1b8607cdb2ac826a8d58e"}, - {file = "aiohttp-3.10.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9743fa34a10a36ddd448bba8a3adc2a66a1c575c3c2940301bacd6cc896c6bf1"}, - {file = "aiohttp-3.10.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7c126f532caf238031c19d169cfae3c6a59129452c990a6e84d6e7b198a001dc"}, - {file = "aiohttp-3.10.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:926e68438f05703e500b06fe7148ef3013dd6f276de65c68558fa9974eeb59ad"}, - {file = "aiohttp-3.10.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:434b3ab75833accd0b931d11874e206e816f6e6626fd69f643d6a8269cd9166a"}, - {file = "aiohttp-3.10.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d35235a44ec38109b811c3600d15d8383297a8fab8e3dec6147477ec8636712a"}, - {file = "aiohttp-3.10.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59c489661edbd863edb30a8bd69ecb044bd381d1818022bc698ba1b6f80e5dd1"}, - {file = "aiohttp-3.10.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50544fe498c81cb98912afabfc4e4d9d85e89f86238348e3712f7ca6a2f01dab"}, - {file = "aiohttp-3.10.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:09bc79275737d4dc066e0ae2951866bb36d9c6b460cb7564f111cc0427f14844"}, - {file = "aiohttp-3.10.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:af4dbec58e37f5afff4f91cdf235e8e4b0bd0127a2a4fd1040e2cad3369d2f06"}, - {file = "aiohttp-3.10.3-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:b22cae3c9dd55a6b4c48c63081d31c00fc11fa9db1a20c8a50ee38c1a29539d2"}, - {file = "aiohttp-3.10.3-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ba562736d3fbfe9241dad46c1a8994478d4a0e50796d80e29d50cabe8fbfcc3f"}, - {file = "aiohttp-3.10.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:f25d6c4e82d7489be84f2b1c8212fafc021b3731abdb61a563c90e37cced3a21"}, - {file = "aiohttp-3.10.3-cp39-cp39-win32.whl", hash = "sha256:b69d832e5f5fa15b1b6b2c8eb6a9fd2c0ec1fd7729cb4322ed27771afc9fc2ac"}, - {file = "aiohttp-3.10.3-cp39-cp39-win_amd64.whl", hash = "sha256:673bb6e3249dc8825df1105f6ef74e2eab779b7ff78e96c15cadb78b04a83752"}, - {file = "aiohttp-3.10.3.tar.gz", hash = "sha256:21650e7032cc2d31fc23d353d7123e771354f2a3d5b05a5647fc30fea214e696"}, + "propcache>=0.2.0", + "yarl<2.0,>=1.17.0", +] +files = [ + {file = "aiohttp-3.11.10-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cbad88a61fa743c5d283ad501b01c153820734118b65aee2bd7dbb735475ce0d"}, + {file = "aiohttp-3.11.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:80886dac673ceaef499de2f393fc80bb4481a129e6cb29e624a12e3296cc088f"}, + {file = "aiohttp-3.11.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:61b9bae80ed1f338c42f57c16918853dc51775fb5cb61da70d590de14d8b5fb4"}, + {file = "aiohttp-3.11.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9e2e576caec5c6a6b93f41626c9c02fc87cd91538b81a3670b2e04452a63def6"}, + {file = "aiohttp-3.11.10-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:02c13415b5732fb6ee7ff64583a5e6ed1c57aa68f17d2bda79c04888dfdc2769"}, + {file = "aiohttp-3.11.10-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4cfce37f31f20800a6a6620ce2cdd6737b82e42e06e6e9bd1b36f546feb3c44f"}, + {file = 
"aiohttp-3.11.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3bbbfff4c679c64e6e23cb213f57cc2c9165c9a65d63717108a644eb5a7398df"}, + {file = "aiohttp-3.11.10-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49c7dbbc1a559ae14fc48387a115b7d4bbc84b4a2c3b9299c31696953c2a5219"}, + {file = "aiohttp-3.11.10-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:68386d78743e6570f054fe7949d6cb37ef2b672b4d3405ce91fafa996f7d9b4d"}, + {file = "aiohttp-3.11.10-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:9ef405356ba989fb57f84cac66f7b0260772836191ccefbb987f414bcd2979d9"}, + {file = "aiohttp-3.11.10-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:5d6958671b296febe7f5f859bea581a21c1d05430d1bbdcf2b393599b1cdce77"}, + {file = "aiohttp-3.11.10-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:99b7920e7165be5a9e9a3a7f1b680f06f68ff0d0328ff4079e5163990d046767"}, + {file = "aiohttp-3.11.10-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0dc49f42422163efb7e6f1df2636fe3db72713f6cd94688e339dbe33fe06d61d"}, + {file = "aiohttp-3.11.10-cp310-cp310-win32.whl", hash = "sha256:40d1c7a7f750b5648642586ba7206999650208dbe5afbcc5284bcec6579c9b91"}, + {file = "aiohttp-3.11.10-cp310-cp310-win_amd64.whl", hash = "sha256:68ff6f48b51bd78ea92b31079817aff539f6c8fc80b6b8d6ca347d7c02384e33"}, + {file = "aiohttp-3.11.10-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:77c4aa15a89847b9891abf97f3d4048f3c2d667e00f8a623c89ad2dccee6771b"}, + {file = "aiohttp-3.11.10-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:909af95a72cedbefe5596f0bdf3055740f96c1a4baa0dd11fd74ca4de0b4e3f1"}, + {file = "aiohttp-3.11.10-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:386fbe79863eb564e9f3615b959e28b222259da0c48fd1be5929ac838bc65683"}, + {file = "aiohttp-3.11.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3de34936eb1a647aa919655ff8d38b618e9f6b7f250cc19a57a4bf7fd2062b6d"}, + {file = "aiohttp-3.11.10-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0c9527819b29cd2b9f52033e7fb9ff08073df49b4799c89cb5754624ecd98299"}, + {file = "aiohttp-3.11.10-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65a96e3e03300b41f261bbfd40dfdbf1c301e87eab7cd61c054b1f2e7c89b9e8"}, + {file = "aiohttp-3.11.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98f5635f7b74bcd4f6f72fcd85bea2154b323a9f05226a80bc7398d0c90763b0"}, + {file = "aiohttp-3.11.10-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:03b6002e20938fc6ee0918c81d9e776bebccc84690e2b03ed132331cca065ee5"}, + {file = "aiohttp-3.11.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6362cc6c23c08d18ddbf0e8c4d5159b5df74fea1a5278ff4f2c79aed3f4e9f46"}, + {file = "aiohttp-3.11.10-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:3691ed7726fef54e928fe26344d930c0c8575bc968c3e239c2e1a04bd8cf7838"}, + {file = "aiohttp-3.11.10-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:31d5093d3acd02b31c649d3a69bb072d539d4c7659b87caa4f6d2bcf57c2fa2b"}, + {file = "aiohttp-3.11.10-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:8b3cf2dc0f0690a33f2d2b2cb15db87a65f1c609f53c37e226f84edb08d10f52"}, + {file = "aiohttp-3.11.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:fbbaea811a2bba171197b08eea288b9402faa2bab2ba0858eecdd0a4105753a3"}, + {file = "aiohttp-3.11.10-cp311-cp311-win32.whl", hash = 
"sha256:4b2c7ac59c5698a7a8207ba72d9e9c15b0fc484a560be0788b31312c2c5504e4"}, + {file = "aiohttp-3.11.10-cp311-cp311-win_amd64.whl", hash = "sha256:974d3a2cce5fcfa32f06b13ccc8f20c6ad9c51802bb7f829eae8a1845c4019ec"}, + {file = "aiohttp-3.11.10-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:0580f2e12de2138f34debcd5d88894786453a76e98febaf3e8fe5db62d01c9bf"}, + {file = "aiohttp-3.11.10-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a55d2ad345684e7c3dd2c20d2f9572e9e1d5446d57200ff630e6ede7612e307f"}, + {file = "aiohttp-3.11.10-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:04814571cb72d65a6899db6099e377ed00710bf2e3eafd2985166f2918beaf59"}, + {file = "aiohttp-3.11.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e44a9a3c053b90c6f09b1bb4edd880959f5328cf63052503f892c41ea786d99f"}, + {file = "aiohttp-3.11.10-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:502a1464ccbc800b4b1995b302efaf426e8763fadf185e933c2931df7db9a199"}, + {file = "aiohttp-3.11.10-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:613e5169f8ae77b1933e42e418a95931fb4867b2991fc311430b15901ed67079"}, + {file = "aiohttp-3.11.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4cca22a61b7fe45da8fc73c3443150c3608750bbe27641fc7558ec5117b27fdf"}, + {file = "aiohttp-3.11.10-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:86a5dfcc39309470bd7b68c591d84056d195428d5d2e0b5ccadfbaf25b026ebc"}, + {file = "aiohttp-3.11.10-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:77ae58586930ee6b2b6f696c82cf8e78c8016ec4795c53e36718365f6959dc82"}, + {file = "aiohttp-3.11.10-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:78153314f26d5abef3239b4a9af20c229c6f3ecb97d4c1c01b22c4f87669820c"}, + {file = "aiohttp-3.11.10-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:98283b94cc0e11c73acaf1c9698dea80c830ca476492c0fe2622bd931f34b487"}, + {file = "aiohttp-3.11.10-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:53bf2097e05c2accc166c142a2090e4c6fd86581bde3fd9b2d3f9e93dda66ac1"}, + {file = "aiohttp-3.11.10-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c5532f0441fc09c119e1dca18fbc0687e64fbeb45aa4d6a87211ceaee50a74c4"}, + {file = "aiohttp-3.11.10-cp39-cp39-win32.whl", hash = "sha256:47ad15a65fb41c570cd0ad9a9ff8012489e68176e7207ec7b82a0940dddfd8be"}, + {file = "aiohttp-3.11.10-cp39-cp39-win_amd64.whl", hash = "sha256:c6b9e6d7e41656d78e37ce754813fa44b455c3d0d0dced2a047def7dc5570b74"}, + {file = "aiohttp-3.11.10.tar.gz", hash = "sha256:b1fc6b45010a8d0ff9e88f9f2418c6fd408c99c211257334aff41597ebece42e"}, ] [[package]] name = "aiosignal" -version = "1.3.1" -requires_python = ">=3.7" +version = "1.3.2" +requires_python = ">=3.9" summary = "aiosignal: a list of registered asynchronous callbacks" groups = ["default"] dependencies = [ "frozenlist>=1.1.0", ] files = [ - {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, - {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, + {file = "aiosignal-1.3.2-py2.py3-none-any.whl", hash = "sha256:45cde58e409a301715980c2b01d0c28bdde3770d8290b5eb2173759d9acb31a5"}, + {file = "aiosignal-1.3.2.tar.gz", hash = "sha256:a8c255c66fafb1e499c9351d0bf32ff2d8a0321595ebac3b93713656d2436f54"}, ] [[package]] name = "amqp" -version = "5.2.0" +version = "5.3.1" requires_python = ">=3.6" summary = "Low-level AMQP client for Python 
(fork of amqplib)." groups = ["default"] @@ -106,8 +107,8 @@ dependencies = [ "vine<6.0.0,>=5.0.0", ] files = [ - {file = "amqp-5.2.0-py3-none-any.whl", hash = "sha256:827cb12fb0baa892aad844fd95258143bce4027fdac4fccddbc43330fd281637"}, - {file = "amqp-5.2.0.tar.gz", hash = "sha256:a1ecff425ad063ad42a486c902807d1482311481c8ad95a72694b2975e75f7fd"}, + {file = "amqp-5.3.1-py3-none-any.whl", hash = "sha256:43b3319e1b4e7d1251833a93d672b4af1e40f3d632d479b98661a95f117880a2"}, + {file = "amqp-5.3.1.tar.gz", hash = "sha256:cddc00c725449522023bad949f70fff7b48f0b1ade74d170a6f10ab044739432"}, ] [[package]] @@ -123,47 +124,47 @@ files = [ [[package]] name = "anyio" -version = "4.4.0" -requires_python = ">=3.8" +version = "4.7.0" +requires_python = ">=3.9" summary = "High level compatibility layer for multiple asynchronous event loop implementations" groups = ["default"] dependencies = [ "exceptiongroup>=1.0.2; python_version < \"3.11\"", "idna>=2.8", "sniffio>=1.1", - "typing-extensions>=4.1; python_version < \"3.11\"", + "typing-extensions>=4.5; python_version < \"3.13\"", ] files = [ - {file = "anyio-4.4.0-py3-none-any.whl", hash = "sha256:c1b2d8f46a8a812513012e1107cb0e68c17159a7a594208005a57dc776e1bdc7"}, - {file = "anyio-4.4.0.tar.gz", hash = "sha256:5aadc6a1bbb7cdb0bede386cac5e2940f5e2ff3aa20277e991cf028e0585ce94"}, + {file = "anyio-4.7.0-py3-none-any.whl", hash = "sha256:ea60c3723ab42ba6fff7e8ccb0488c898ec538ff4df1f1d5e642c3601d07e352"}, + {file = "anyio-4.7.0.tar.gz", hash = "sha256:2f834749c602966b7d456a7567cafcb309f96482b5081d14ac93ccd457f9dd48"}, ] [[package]] name = "async-timeout" -version = "4.0.3" -requires_python = ">=3.7" +version = "5.0.1" +requires_python = ">=3.8" summary = "Timeout context manager for asyncio programs" groups = ["default"] marker = "python_full_version < \"3.11.3\"" files = [ - {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, - {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, + {file = "async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c"}, + {file = "async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3"}, ] [[package]] name = "attrs" -version = "24.2.0" -requires_python = ">=3.7" +version = "24.3.0" +requires_python = ">=3.8" summary = "Classes Without Boilerplate" groups = ["default"] files = [ - {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, - {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, + {file = "attrs-24.3.0-py3-none-any.whl", hash = "sha256:ac96cd038792094f438ad1f6ff80837353805ac950cd2aa0e0625ef19850c308"}, + {file = "attrs-24.3.0.tar.gz", hash = "sha256:8f5c07333d543103541ba7be0e2ce16eeee8130cb0b3f9238ab904ce1e85baff"}, ] [[package]] name = "azure-core" -version = "1.30.2" +version = "1.32.0" requires_python = ">=3.8" summary = "Microsoft Azure Core Library for Python" groups = ["default"] @@ -173,26 +174,26 @@ dependencies = [ "typing-extensions>=4.6.0", ] files = [ - {file = "azure-core-1.30.2.tar.gz", hash = "sha256:a14dc210efcd608821aa472d9fb8e8d035d29b68993819147bc290a8ac224472"}, - {file = "azure_core-1.30.2-py3-none-any.whl", hash = "sha256:cf019c1ca832e96274ae85abd3d9f752397194d9fea3b41487290562ac8abe4a"}, + {file = 
"azure_core-1.32.0-py3-none-any.whl", hash = "sha256:eac191a0efb23bfa83fddf321b27b122b4ec847befa3091fa736a5c32c50d7b4"}, + {file = "azure_core-1.32.0.tar.gz", hash = "sha256:22b3c35d6b2dae14990f6c1be2912bf23ffe50b220e708a28ab1bb92b1c730e5"}, ] [[package]] name = "azure-identity" -version = "1.17.1" +version = "1.19.0" requires_python = ">=3.8" summary = "Microsoft Azure Identity Library for Python" groups = ["default"] dependencies = [ - "azure-core>=1.23.0", + "azure-core>=1.31.0", "cryptography>=2.5", - "msal-extensions>=0.3.0", - "msal>=1.24.0", + "msal-extensions>=1.2.0", + "msal>=1.30.0", "typing-extensions>=4.0.0", ] files = [ - {file = "azure-identity-1.17.1.tar.gz", hash = "sha256:32ecc67cc73f4bd0595e4f64b1ca65cd05186f4fe6f98ed2ae9f1aa32646efea"}, - {file = "azure_identity-1.17.1-py3-none-any.whl", hash = "sha256:db8d59c183b680e763722bfe8ebc45930e6c57df510620985939f7f3191e0382"}, + {file = "azure_identity-1.19.0-py3-none-any.whl", hash = "sha256:e3f6558c181692d7509f09de10cca527c7dce426776454fb97df512a46527e81"}, + {file = "azure_identity-1.19.0.tar.gz", hash = "sha256:500144dc18197d7019b81501165d4fa92225f03778f17d7ca8a2a180129a9c83"}, ] [[package]] @@ -211,23 +212,23 @@ files = [ [[package]] name = "boto3" -version = "1.34.158" +version = "1.34.162" requires_python = ">=3.8" summary = "The AWS SDK for Python" groups = ["default"] dependencies = [ - "botocore<1.35.0,>=1.34.158", + "botocore<1.35.0,>=1.34.162", "jmespath<2.0.0,>=0.7.1", "s3transfer<0.11.0,>=0.10.0", ] files = [ - {file = "boto3-1.34.158-py3-none-any.whl", hash = "sha256:c29e9b7e1034e8734ccaffb9f2b3f3df2268022fd8a93d836604019f8759ce27"}, - {file = "boto3-1.34.158.tar.gz", hash = "sha256:5b7b2ce0ec1e498933f600d29f3e1c641f8c44dd7e468c26795359d23d81fa39"}, + {file = "boto3-1.34.162-py3-none-any.whl", hash = "sha256:d6f6096bdab35a0c0deff469563b87d184a28df7689790f7fe7be98502b7c590"}, + {file = "boto3-1.34.162.tar.gz", hash = "sha256:873f8f5d2f6f85f1018cbb0535b03cceddc7b655b61f66a0a56995238804f41f"}, ] [[package]] name = "botocore" -version = "1.34.158" +version = "1.34.162" requires_python = ">=3.8" summary = "Low-level, data-driven core of boto 3." groups = ["default"] @@ -238,24 +239,24 @@ dependencies = [ "urllib3<1.27,>=1.25.4; python_version < \"3.10\"", ] files = [ - {file = "botocore-1.34.158-py3-none-any.whl", hash = "sha256:0e6fceba1e39bfa8feeba70ba3ac2af958b3387df4bd3b5f2db3f64c1754c756"}, - {file = "botocore-1.34.158.tar.gz", hash = "sha256:5934082e25ad726673afbf466092fb1223dafa250e6e756c819430ba6b1b3da5"}, + {file = "botocore-1.34.162-py3-none-any.whl", hash = "sha256:2d918b02db88d27a75b48275e6fb2506e9adaaddbec1ffa6a8a0898b34e769be"}, + {file = "botocore-1.34.162.tar.gz", hash = "sha256:adc23be4fb99ad31961236342b7cbf3c0bfc62532cd02852196032e8c0d682f3"}, ] [[package]] name = "certifi" -version = "2024.7.4" +version = "2024.12.14" requires_python = ">=3.6" summary = "Python package for providing Mozilla's CA Bundle." 
groups = ["default"] files = [ - {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"}, - {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, + {file = "certifi-2024.12.14-py3-none-any.whl", hash = "sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56"}, + {file = "certifi-2024.12.14.tar.gz", hash = "sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db"}, ] [[package]] name = "cffi" -version = "1.17.0" +version = "1.17.1" requires_python = ">=3.8" summary = "Foreign Function Interface for Python calling C code." groups = ["default"] @@ -264,99 +265,99 @@ dependencies = [ "pycparser", ] files = [ - {file = "cffi-1.17.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f9338cc05451f1942d0d8203ec2c346c830f8e86469903d5126c1f0a13a2bcbb"}, - {file = "cffi-1.17.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a0ce71725cacc9ebf839630772b07eeec220cbb5f03be1399e0457a1464f8e1a"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c815270206f983309915a6844fe994b2fa47e5d05c4c4cef267c3b30e34dbe42"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6bdcd415ba87846fd317bee0774e412e8792832e7805938987e4ede1d13046d"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a98748ed1a1df4ee1d6f927e151ed6c1a09d5ec21684de879c7ea6aa96f58f2"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0a048d4f6630113e54bb4b77e315e1ba32a5a31512c31a273807d0027a7e69ab"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24aa705a5f5bd3a8bcfa4d123f03413de5d86e497435693b638cbffb7d5d8a1b"}, - {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:856bf0924d24e7f93b8aee12a3a1095c34085600aa805693fb7f5d1962393206"}, - {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:4304d4416ff032ed50ad6bb87416d802e67139e31c0bde4628f36a47a3164bfa"}, - {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:331ad15c39c9fe9186ceaf87203a9ecf5ae0ba2538c9e898e3a6967e8ad3db6f"}, - {file = "cffi-1.17.0-cp310-cp310-win32.whl", hash = "sha256:669b29a9eca6146465cc574659058ed949748f0809a2582d1f1a324eb91054dc"}, - {file = "cffi-1.17.0-cp310-cp310-win_amd64.whl", hash = "sha256:48b389b1fd5144603d61d752afd7167dfd205973a43151ae5045b35793232aa2"}, - {file = "cffi-1.17.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c5d97162c196ce54af6700949ddf9409e9833ef1003b4741c2b39ef46f1d9720"}, - {file = "cffi-1.17.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5ba5c243f4004c750836f81606a9fcb7841f8874ad8f3bf204ff5e56332b72b9"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bb9333f58fc3a2296fb1d54576138d4cf5d496a2cc118422bd77835e6ae0b9cb"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:435a22d00ec7d7ea533db494da8581b05977f9c37338c80bc86314bec2619424"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d1df34588123fcc88c872f5acb6f74ae59e9d182a2707097f9e28275ec26a12d"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:df8bb0010fdd0a743b7542589223a2816bdde4d94bb5ad67884348fa2c1c67e8"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8b5b9712783415695663bd463990e2f00c6750562e6ad1d28e072a611c5f2a6"}, - {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ffef8fd58a36fb5f1196919638f73dd3ae0db1a878982b27a9a5a176ede4ba91"}, - {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e67d26532bfd8b7f7c05d5a766d6f437b362c1bf203a3a5ce3593a645e870b8"}, - {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:45f7cd36186db767d803b1473b3c659d57a23b5fa491ad83c6d40f2af58e4dbb"}, - {file = "cffi-1.17.0-cp311-cp311-win32.whl", hash = "sha256:a9015f5b8af1bb6837a3fcb0cdf3b874fe3385ff6274e8b7925d81ccaec3c5c9"}, - {file = "cffi-1.17.0-cp311-cp311-win_amd64.whl", hash = "sha256:b50aaac7d05c2c26dfd50c3321199f019ba76bb650e346a6ef3616306eed67b0"}, - {file = "cffi-1.17.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1a2ddbac59dc3716bc79f27906c010406155031a1c801410f1bafff17ea304d2"}, - {file = "cffi-1.17.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6327b572f5770293fc062a7ec04160e89741e8552bf1c358d1a23eba68166759"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbc183e7bef690c9abe5ea67b7b60fdbca81aa8da43468287dae7b5c046107d4"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bdc0f1f610d067c70aa3737ed06e2726fd9d6f7bfee4a351f4c40b6831f4e82"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6d872186c1617d143969defeadac5a904e6e374183e07977eedef9c07c8953bf"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0d46ee4764b88b91f16661a8befc6bfb24806d885e27436fdc292ed7e6f6d058"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f76a90c345796c01d85e6332e81cab6d70de83b829cf1d9762d0a3da59c7932"}, - {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0e60821d312f99d3e1569202518dddf10ae547e799d75aef3bca3a2d9e8ee693"}, - {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:eb09b82377233b902d4c3fbeeb7ad731cdab579c6c6fda1f763cd779139e47c3"}, - {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:24658baf6224d8f280e827f0a50c46ad819ec8ba380a42448e24459daf809cf4"}, - {file = "cffi-1.17.0-cp39-cp39-win32.whl", hash = "sha256:0fdacad9e0d9fc23e519efd5ea24a70348305e8d7d85ecbb1a5fa66dc834e7fb"}, - {file = "cffi-1.17.0-cp39-cp39-win_amd64.whl", hash = "sha256:7cbc78dc018596315d4e7841c8c3a7ae31cc4d638c9b627f87d52e8abaaf2d29"}, - {file = "cffi-1.17.0.tar.gz", hash = "sha256:f3157624b7558b914cb039fd1af735e5e8049a87c817cc215109ad1c8779df76"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"}, + {file = 
"cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"}, + {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"}, + {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"}, + {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"}, + {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"}, + {file = 
"cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"}, + {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"}, + {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, + {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, ] [[package]] name = "charset-normalizer" -version = "3.3.2" +version = "3.4.0" requires_python = ">=3.7.0" summary = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." groups = ["default"] files = [ - {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = 
"sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", 
hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, - {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3"}, 
+ {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-win32.whl", hash = "sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-win32.whl", hash = 
"sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-win32.whl", hash = "sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca"}, + {file = "charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079"}, + {file = "charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e"}, ] [[package]] @@ -387,7 +388,7 @@ files = [ [[package]] name = "cryptography" -version = "43.0.0" +version = "43.0.3" requires_python = ">=3.7" summary = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
groups = ["default"] @@ -395,33 +396,33 @@ dependencies = [ "cffi>=1.12; platform_python_implementation != \"PyPy\"", ] files = [ - {file = "cryptography-43.0.0-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:64c3f16e2a4fc51c0d06af28441881f98c5d91009b8caaff40cf3548089e9c74"}, - {file = "cryptography-43.0.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3dcdedae5c7710b9f97ac6bba7e1052b95c7083c9d0e9df96e02a1932e777895"}, - {file = "cryptography-43.0.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d9a1eca329405219b605fac09ecfc09ac09e595d6def650a437523fcd08dd22"}, - {file = "cryptography-43.0.0-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ea9e57f8ea880eeea38ab5abf9fbe39f923544d7884228ec67d666abd60f5a47"}, - {file = "cryptography-43.0.0-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:9a8d6802e0825767476f62aafed40532bd435e8a5f7d23bd8b4f5fd04cc80ecf"}, - {file = "cryptography-43.0.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:cc70b4b581f28d0a254d006f26949245e3657d40d8857066c2ae22a61222ef55"}, - {file = "cryptography-43.0.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:4a997df8c1c2aae1e1e5ac49c2e4f610ad037fc5a3aadc7b64e39dea42249431"}, - {file = "cryptography-43.0.0-cp37-abi3-win32.whl", hash = "sha256:6e2b11c55d260d03a8cf29ac9b5e0608d35f08077d8c087be96287f43af3ccdc"}, - {file = "cryptography-43.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:31e44a986ceccec3d0498e16f3d27b2ee5fdf69ce2ab89b52eaad1d2f33d8778"}, - {file = "cryptography-43.0.0-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:7b3f5fe74a5ca32d4d0f302ffe6680fcc5c28f8ef0dc0ae8f40c0f3a1b4fca66"}, - {file = "cryptography-43.0.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac1955ce000cb29ab40def14fd1bbfa7af2017cca696ee696925615cafd0dce5"}, - {file = "cryptography-43.0.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:299d3da8e00b7e2b54bb02ef58d73cd5f55fb31f33ebbf33bd00d9aa6807df7e"}, - {file = "cryptography-43.0.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ee0c405832ade84d4de74b9029bedb7b31200600fa524d218fc29bfa371e97f5"}, - {file = "cryptography-43.0.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cb013933d4c127349b3948aa8aaf2f12c0353ad0eccd715ca789c8a0f671646f"}, - {file = "cryptography-43.0.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:fdcb265de28585de5b859ae13e3846a8e805268a823a12a4da2597f1f5afc9f0"}, - {file = "cryptography-43.0.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2905ccf93a8a2a416f3ec01b1a7911c3fe4073ef35640e7ee5296754e30b762b"}, - {file = "cryptography-43.0.0-cp39-abi3-win32.whl", hash = "sha256:47ca71115e545954e6c1d207dd13461ab81f4eccfcb1345eac874828b5e3eaaf"}, - {file = "cryptography-43.0.0-cp39-abi3-win_amd64.whl", hash = "sha256:0663585d02f76929792470451a5ba64424acc3cd5227b03921dab0e2f27b1709"}, - {file = "cryptography-43.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2c6d112bf61c5ef44042c253e4859b3cbbb50df2f78fa8fae6747a7814484a70"}, - {file = "cryptography-43.0.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:844b6d608374e7d08f4f6e6f9f7b951f9256db41421917dfb2d003dde4cd6b66"}, - {file = "cryptography-43.0.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:51956cf8730665e2bdf8ddb8da0056f699c1a5715648c1b0144670c1ba00b48f"}, - {file = "cryptography-43.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:aae4d918f6b180a8ab8bf6511a419473d107df4dbb4225c7b48c5c9602c38c7f"}, - {file = 
"cryptography-43.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:232ce02943a579095a339ac4b390fbbe97f5b5d5d107f8a08260ea2768be8cc2"}, - {file = "cryptography-43.0.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5bcb8a5620008a8034d39bce21dc3e23735dfdb6a33a06974739bfa04f853947"}, - {file = "cryptography-43.0.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:08a24a7070b2b6804c1940ff0f910ff728932a9d0e80e7814234269f9d46d069"}, - {file = "cryptography-43.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:e9c5266c432a1e23738d178e51c2c7a5e2ddf790f248be939448c0ba2021f9d1"}, - {file = "cryptography-43.0.0.tar.gz", hash = "sha256:b88075ada2d51aa9f18283532c9f60e72170041bba88d7f37e49cbb10275299e"}, + {file = "cryptography-43.0.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:bf7a1932ac4176486eab36a19ed4c0492da5d97123f1406cf15e41b05e787d2e"}, + {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63efa177ff54aec6e1c0aefaa1a241232dcd37413835a9b674b6e3f0ae2bfd3e"}, + {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e1ce50266f4f70bf41a2c6dc4358afadae90e2a1e5342d3c08883df1675374f"}, + {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:443c4a81bb10daed9a8f334365fe52542771f25aedaf889fd323a853ce7377d6"}, + {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:74f57f24754fe349223792466a709f8e0c093205ff0dca557af51072ff47ab18"}, + {file = "cryptography-43.0.3-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9762ea51a8fc2a88b70cf2995e5675b38d93bf36bd67d91721c309df184f49bd"}, + {file = "cryptography-43.0.3-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:81ef806b1fef6b06dcebad789f988d3b37ccaee225695cf3e07648eee0fc6b73"}, + {file = "cryptography-43.0.3-cp37-abi3-win32.whl", hash = "sha256:cbeb489927bd7af4aa98d4b261af9a5bc025bd87f0e3547e11584be9e9427be2"}, + {file = "cryptography-43.0.3-cp37-abi3-win_amd64.whl", hash = "sha256:f46304d6f0c6ab8e52770addfa2fc41e6629495548862279641972b6215451cd"}, + {file = "cryptography-43.0.3-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:8ac43ae87929a5982f5948ceda07001ee5e83227fd69cf55b109144938d96984"}, + {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:846da004a5804145a5f441b8530b4bf35afbf7da70f82409f151695b127213d5"}, + {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f996e7268af62598f2fc1204afa98a3b5712313a55c4c9d434aef49cadc91d4"}, + {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f7b178f11ed3664fd0e995a47ed2b5ff0a12d893e41dd0494f406d1cf555cab7"}, + {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:c2e6fc39c4ab499049df3bdf567f768a723a5e8464816e8f009f121a5a9f4405"}, + {file = "cryptography-43.0.3-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:e1be4655c7ef6e1bbe6b5d0403526601323420bcf414598955968c9ef3eb7d16"}, + {file = "cryptography-43.0.3-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:df6b6c6d742395dd77a23ea3728ab62f98379eff8fb61be2744d4679ab678f73"}, + {file = "cryptography-43.0.3-cp39-abi3-win32.whl", hash = "sha256:d56e96520b1020449bbace2b78b603442e7e378a9b3bd68de65c782db1507995"}, + {file = "cryptography-43.0.3-cp39-abi3-win_amd64.whl", hash = "sha256:0c580952eef9bf68c4747774cde7ec1d85a6e61de97281f2dba83c7d2c806362"}, + {file = 
"cryptography-43.0.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d03b5621a135bffecad2c73e9f4deb1a0f977b9a8ffe6f8e002bf6c9d07b918c"}, + {file = "cryptography-43.0.3-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a2a431ee15799d6db9fe80c82b055bae5a752bef645bba795e8e52687c69efe3"}, + {file = "cryptography-43.0.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:281c945d0e28c92ca5e5930664c1cefd85efe80e5c0d2bc58dd63383fda29f83"}, + {file = "cryptography-43.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f18c716be16bc1fea8e95def49edf46b82fccaa88587a45f8dc0ff6ab5d8e0a7"}, + {file = "cryptography-43.0.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4a02ded6cd4f0a5562a8887df8b3bd14e822a90f97ac5e544c162899bc467664"}, + {file = "cryptography-43.0.3-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:53a583b6637ab4c4e3591a15bc9db855b8d9dee9a669b550f311480acab6eb08"}, + {file = "cryptography-43.0.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1ec0bcf7e17c0c5669d881b1cd38c4972fade441b27bda1051665faaa89bdcaa"}, + {file = "cryptography-43.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2ce6fae5bdad59577b44e4dfed356944fbf1d925269114c28be377692643b4ff"}, + {file = "cryptography-43.0.3.tar.gz", hash = "sha256:315b9001266a492a6ff443b61238f956b214dbec9910a081ba5b6646a055a805"}, ] [[package]] @@ -441,16 +442,16 @@ files = [ [[package]] name = "deprecated" -version = "1.2.14" -requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "1.2.15" +requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" summary = "Python @deprecated decorator to deprecate old python classes, functions or methods." groups = ["default"] dependencies = [ "wrapt<2,>=1.10", ] files = [ - {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, - {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, + {file = "Deprecated-1.2.15-py2.py3-none-any.whl", hash = "sha256:353bc4a8ac4bfc96800ddab349d89c25dec1079f65fd53acdcc1e0b975b21320"}, + {file = "deprecated-1.2.15.tar.gz", hash = "sha256:683e561a90de76239796e6b6feac66b99030d2dd3fcf61ef996330f14bbb9b0d"}, ] [[package]] @@ -488,107 +489,107 @@ files = [ [[package]] name = "frozenlist" -version = "1.4.1" +version = "1.5.0" requires_python = ">=3.8" summary = "A list-like structure which implements collections.abc.MutableSequence" groups = ["default"] files = [ - {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f9aa1878d1083b276b0196f2dfbe00c9b7e752475ed3b682025ff20c1c1f51ac"}, - {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29acab3f66f0f24674b7dc4736477bcd4bc3ad4b896f5f45379a67bce8b96868"}, - {file = "frozenlist-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:74fb4bee6880b529a0c6560885fce4dc95936920f9f20f53d99a213f7bf66776"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:590344787a90ae57d62511dd7c736ed56b428f04cd8c161fcc5e7232c130c69a"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:068b63f23b17df8569b7fdca5517edef76171cf3897eb68beb01341131fbd2ad"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c849d495bf5154cd8da18a9eb15db127d4dba2968d88831aff6f0331ea9bd4c"}, - {file = 
"frozenlist-1.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9750cc7fe1ae3b1611bb8cfc3f9ec11d532244235d75901fb6b8e42ce9229dfe"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9b2de4cf0cdd5bd2dee4c4f63a653c61d2408055ab77b151c1957f221cabf2a"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0633c8d5337cb5c77acbccc6357ac49a1770b8c487e5b3505c57b949b4b82e98"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:27657df69e8801be6c3638054e202a135c7f299267f1a55ed3a598934f6c0d75"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:f9a3ea26252bd92f570600098783d1371354d89d5f6b7dfd87359d669f2109b5"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:4f57dab5fe3407b6c0c1cc907ac98e8a189f9e418f3b6e54d65a718aaafe3950"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e02a0e11cf6597299b9f3bbd3f93d79217cb90cfd1411aec33848b13f5c656cc"}, - {file = "frozenlist-1.4.1-cp310-cp310-win32.whl", hash = "sha256:a828c57f00f729620a442881cc60e57cfcec6842ba38e1b19fd3e47ac0ff8dc1"}, - {file = "frozenlist-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:f56e2333dda1fe0f909e7cc59f021eba0d2307bc6f012a1ccf2beca6ba362439"}, - {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a0cb6f11204443f27a1628b0e460f37fb30f624be6051d490fa7d7e26d4af3d0"}, - {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b46c8ae3a8f1f41a0d2ef350c0b6e65822d80772fe46b653ab6b6274f61d4a49"}, - {file = "frozenlist-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fde5bd59ab5357e3853313127f4d3565fc7dad314a74d7b5d43c22c6a5ed2ced"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:722e1124aec435320ae01ee3ac7bec11a5d47f25d0ed6328f2273d287bc3abb0"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2471c201b70d58a0f0c1f91261542a03d9a5e088ed3dc6c160d614c01649c106"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c757a9dd70d72b076d6f68efdbb9bc943665ae954dad2801b874c8c69e185068"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f146e0911cb2f1da549fc58fc7bcd2b836a44b79ef871980d605ec392ff6b0d2"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9c515e7914626b2a2e1e311794b4c35720a0be87af52b79ff8e1429fc25f19"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c302220494f5c1ebeb0912ea782bcd5e2f8308037b3c7553fad0e48ebad6ad82"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:442acde1e068288a4ba7acfe05f5f343e19fac87bfc96d89eb886b0363e977ec"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:1b280e6507ea8a4fa0c0a7150b4e526a8d113989e28eaaef946cc77ffd7efc0a"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:fe1a06da377e3a1062ae5fe0926e12b84eceb8a50b350ddca72dc85015873f74"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:db9e724bebd621d9beca794f2a4ff1d26eed5965b004a97f1f1685a173b869c2"}, 
- {file = "frozenlist-1.4.1-cp311-cp311-win32.whl", hash = "sha256:e774d53b1a477a67838a904131c4b0eef6b3d8a651f8b138b04f748fccfefe17"}, - {file = "frozenlist-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:fb3c2db03683b5767dedb5769b8a40ebb47d6f7f45b1b3e3b4b51ec8ad9d9825"}, - {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bfa4a17e17ce9abf47a74ae02f32d014c5e9404b6d9ac7f729e01562bbee601e"}, - {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b7e3ed87d4138356775346e6845cccbe66cd9e207f3cd11d2f0b9fd13681359d"}, - {file = "frozenlist-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c99169d4ff810155ca50b4da3b075cbde79752443117d89429595c2e8e37fed8"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edb678da49d9f72c9f6c609fbe41a5dfb9a9282f9e6a2253d5a91e0fc382d7c0"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6db4667b187a6742b33afbbaf05a7bc551ffcf1ced0000a571aedbb4aa42fc7b"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55fdc093b5a3cb41d420884cdaf37a1e74c3c37a31f46e66286d9145d2063bd0"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82e8211d69a4f4bc360ea22cd6555f8e61a1bd211d1d5d39d3d228b48c83a897"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89aa2c2eeb20957be2d950b85974b30a01a762f3308cd02bb15e1ad632e22dc7"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9d3e0c25a2350080e9319724dede4f31f43a6c9779be48021a7f4ebde8b2d742"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7268252af60904bf52c26173cbadc3a071cece75f873705419c8681f24d3edea"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:0c250a29735d4f15321007fb02865f0e6b6a41a6b88f1f523ca1596ab5f50bd5"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:96ec70beabbd3b10e8bfe52616a13561e58fe84c0101dd031dc78f250d5128b9"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:23b2d7679b73fe0e5a4560b672a39f98dfc6f60df63823b0a9970525325b95f6"}, - {file = "frozenlist-1.4.1-cp39-cp39-win32.whl", hash = "sha256:a7496bfe1da7fb1a4e1cc23bb67c58fab69311cc7d32b5a99c2007b4b2a0e932"}, - {file = "frozenlist-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:e6a20a581f9ce92d389a8c7d7c3dd47c81fd5d6e655c8dddf341e14aa48659d0"}, - {file = "frozenlist-1.4.1-py3-none-any.whl", hash = "sha256:04ced3e6a46b4cfffe20f9ae482818e34eba9b5fb0ce4056e4cc9b6e212d09b7"}, - {file = "frozenlist-1.4.1.tar.gz", hash = "sha256:c037a86e8513059a2613aaba4d817bb90b9d9b6b69aace3ce9c877e8c8ed402b"}, + {file = "frozenlist-1.5.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5b6a66c18b5b9dd261ca98dffcb826a525334b2f29e7caa54e182255c5f6a65a"}, + {file = "frozenlist-1.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d1b3eb7b05ea246510b43a7e53ed1653e55c2121019a97e60cad7efb881a97bb"}, + {file = "frozenlist-1.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:15538c0cbf0e4fa11d1e3a71f823524b0c46299aed6e10ebb4c2089abd8c3bec"}, + {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e79225373c317ff1e35f210dd5f1344ff31066ba8067c307ab60254cd3a78ad5"}, + {file = 
"frozenlist-1.5.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9272fa73ca71266702c4c3e2d4a28553ea03418e591e377a03b8e3659d94fa76"}, + {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:498524025a5b8ba81695761d78c8dd7382ac0b052f34e66939c42df860b8ff17"}, + {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:92b5278ed9d50fe610185ecd23c55d8b307d75ca18e94c0e7de328089ac5dcba"}, + {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f3c8c1dacd037df16e85227bac13cca58c30da836c6f936ba1df0c05d046d8d"}, + {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f2ac49a9bedb996086057b75bf93538240538c6d9b38e57c82d51f75a73409d2"}, + {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e66cc454f97053b79c2ab09c17fbe3c825ea6b4de20baf1be28919460dd7877f"}, + {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:5a3ba5f9a0dfed20337d3e966dc359784c9f96503674c2faf015f7fe8e96798c"}, + {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:6321899477db90bdeb9299ac3627a6a53c7399c8cd58d25da094007402b039ab"}, + {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:76e4753701248476e6286f2ef492af900ea67d9706a0155335a40ea21bf3b2f5"}, + {file = "frozenlist-1.5.0-cp310-cp310-win32.whl", hash = "sha256:977701c081c0241d0955c9586ffdd9ce44f7a7795df39b9151cd9a6fd0ce4cfb"}, + {file = "frozenlist-1.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:189f03b53e64144f90990d29a27ec4f7997d91ed3d01b51fa39d2dbe77540fd4"}, + {file = "frozenlist-1.5.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:fd74520371c3c4175142d02a976aee0b4cb4a7cc912a60586ffd8d5929979b30"}, + {file = "frozenlist-1.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2f3f7a0fbc219fb4455264cae4d9f01ad41ae6ee8524500f381de64ffaa077d5"}, + {file = "frozenlist-1.5.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f47c9c9028f55a04ac254346e92977bf0f166c483c74b4232bee19a6697e4778"}, + {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0996c66760924da6e88922756d99b47512a71cfd45215f3570bf1e0b694c206a"}, + {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a2fe128eb4edeabe11896cb6af88fca5346059f6c8d807e3b910069f39157869"}, + {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1a8ea951bbb6cacd492e3948b8da8c502a3f814f5d20935aae74b5df2b19cf3d"}, + {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:de537c11e4aa01d37db0d403b57bd6f0546e71a82347a97c6a9f0dcc532b3a45"}, + {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c2623347b933fcb9095841f1cc5d4ff0b278addd743e0e966cb3d460278840d"}, + {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cee6798eaf8b1416ef6909b06f7dc04b60755206bddc599f52232606e18179d3"}, + {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f5f9da7f5dbc00a604fe74aa02ae7c98bcede8a3b8b9666f9f86fc13993bc71a"}, + {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = 
"sha256:90646abbc7a5d5c7c19461d2e3eeb76eb0b204919e6ece342feb6032c9325ae9"}, + {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:bdac3c7d9b705d253b2ce370fde941836a5f8b3c5c2b8fd70940a3ea3af7f4f2"}, + {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:03d33c2ddbc1816237a67f66336616416e2bbb6beb306e5f890f2eb22b959cdf"}, + {file = "frozenlist-1.5.0-cp311-cp311-win32.whl", hash = "sha256:237f6b23ee0f44066219dae14c70ae38a63f0440ce6750f868ee08775073f942"}, + {file = "frozenlist-1.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:0cc974cc93d32c42e7b0f6cf242a6bd941c57c61b618e78b6c0a96cb72788c1d"}, + {file = "frozenlist-1.5.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9bbcdfaf4af7ce002694a4e10a0159d5a8d20056a12b05b45cea944a4953f972"}, + {file = "frozenlist-1.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1893f948bf6681733aaccf36c5232c231e3b5166d607c5fa77773611df6dc336"}, + {file = "frozenlist-1.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2b5e23253bb709ef57a8e95e6ae48daa9ac5f265637529e4ce6b003a37b2621f"}, + {file = "frozenlist-1.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f253985bb515ecd89629db13cb58d702035ecd8cfbca7d7a7e29a0e6d39af5f"}, + {file = "frozenlist-1.5.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:04a5c6babd5e8fb7d3c871dc8b321166b80e41b637c31a995ed844a6139942b6"}, + {file = "frozenlist-1.5.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9fe0f1c29ba24ba6ff6abf688cb0b7cf1efab6b6aa6adc55441773c252f7411"}, + {file = "frozenlist-1.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:226d72559fa19babe2ccd920273e767c96a49b9d3d38badd7c91a0fdeda8ea08"}, + {file = "frozenlist-1.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15b731db116ab3aedec558573c1a5eec78822b32292fe4f2f0345b7f697745c2"}, + {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:366d8f93e3edfe5a918c874702f78faac300209a4d5bf38352b2c1bdc07a766d"}, + {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:1b96af8c582b94d381a1c1f51ffaedeb77c821c690ea5f01da3d70a487dd0a9b"}, + {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:c03eff4a41bd4e38415cbed054bbaff4a075b093e2394b6915dca34a40d1e38b"}, + {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:50cf5e7ee9b98f22bdecbabf3800ae78ddcc26e4a435515fc72d97903e8488e0"}, + {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1e76bfbc72353269c44e0bc2cfe171900fbf7f722ad74c9a7b638052afe6a00c"}, + {file = "frozenlist-1.5.0-cp39-cp39-win32.whl", hash = "sha256:666534d15ba8f0fda3f53969117383d5dc021266b3c1a42c9ec4855e4b58b9d3"}, + {file = "frozenlist-1.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:5c28f4b5dbef8a0d8aad0d4de24d1e9e981728628afaf4ea0792f5d0939372f0"}, + {file = "frozenlist-1.5.0-py3-none-any.whl", hash = "sha256:d994863bba198a4a518b467bb971c56e1db3f180a25c6cf7bb1949c267f748c3"}, + {file = "frozenlist-1.5.0.tar.gz", hash = "sha256:81d5af29e61b9c8348e876d442253723928dce6433e0e76cd925cd83f1b4b817"}, ] [[package]] name = "fsspec" -version = "2024.6.1" +version = "2024.10.0" requires_python = ">=3.8" summary = "File-system specification" groups = ["default"] files = [ - {file = "fsspec-2024.6.1-py3-none-any.whl", hash = 
"sha256:3cb443f8bcd2efb31295a5b9fdb02aee81d8452c80d28f97a6d0959e6cee101e"}, - {file = "fsspec-2024.6.1.tar.gz", hash = "sha256:fad7d7e209dd4c1208e3bbfda706620e0da5142bebbd9c384afb95b07e798e49"}, + {file = "fsspec-2024.10.0-py3-none-any.whl", hash = "sha256:03b9a6785766a4de40368b88906366755e2819e758b83705c88cd7cb5fe81871"}, + {file = "fsspec-2024.10.0.tar.gz", hash = "sha256:eda2d8a4116d4f2429db8550f2457da57279247dd930bb12f821b58391359493"}, ] [[package]] name = "greenlet" -version = "3.0.3" +version = "3.1.1" requires_python = ">=3.7" summary = "Lightweight in-process concurrent programming" groups = ["default"] files = [ - {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405"}, - {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f"}, - {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb"}, - {file = "greenlet-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9"}, - {file = "greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22"}, - {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3"}, - {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d"}, - {file = "greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728"}, - {file = 
"greenlet-3.0.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:da70d4d51c8b306bb7a031d5cff6cc25ad253affe89b70352af5f1cb68e74b53"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086152f8fbc5955df88382e8a75984e2bb1c892ad2e3c80a2508954e52295257"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d73a9fe764d77f87f8ec26a0c85144d6a951a6c438dfe50487df5595c6373eac"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7dcbe92cc99f08c8dd11f930de4d99ef756c3591a5377d1d9cd7dd5e896da71"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1551a8195c0d4a68fac7a4325efac0d541b48def35feb49d803674ac32582f61"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:64d7675ad83578e3fc149b617a444fab8efdafc9385471f868eb5ff83e446b8b"}, - {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b37eef18ea55f2ffd8f00ff8fe7c8d3818abd3e25fb73fae2ca3b672e333a7a6"}, - {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:77457465d89b8263bca14759d7c1684df840b6811b2499838cc5b040a8b5b113"}, - {file = "greenlet-3.0.3-cp39-cp39-win32.whl", hash = "sha256:57e8974f23e47dac22b83436bdcf23080ade568ce77df33159e019d161ce1d1e"}, - {file = "greenlet-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c5ee858cfe08f34712f548c3c363e807e7186f03ad7a5039ebadb29e8c6be067"}, - {file = "greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491"}, + {file = "greenlet-3.1.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:0bbae94a29c9e5c7e4a2b7f0aae5c17e8e90acbfd3bf6270eeba60c39fce3563"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fde093fb93f35ca72a556cf72c92ea3ebfda3d79fc35bb19fbe685853869a83"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:36b89d13c49216cadb828db8dfa6ce86bbbc476a82d3a6c397f0efae0525bdd0"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94b6150a85e1b33b40b1464a3f9988dcc5251d6ed06842abff82e42632fac120"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93147c513fac16385d1036b7e5b102c7fbbdb163d556b791f0f11eada7ba65dc"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:da7a9bff22ce038e19bf62c4dd1ec8391062878710ded0a845bcf47cc0200617"}, + {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b2795058c23988728eec1f36a4e5e4ebad22f8320c85f3587b539b9ac84128d7"}, + {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ed10eac5830befbdd0c32f83e8aa6288361597550ba669b04c48f0f9a2c843c6"}, + {file = "greenlet-3.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:77c386de38a60d1dfb8e55b8c1101d68c79dfdd25c7095d51fec2dd800892b80"}, + {file = "greenlet-3.1.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:e4d333e558953648ca09d64f13e6d8f0523fa705f51cae3f03b5983489958c70"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fc016b73c94e98e29af67ab7b9a879c307c6731a2c9da0db5a7d9b7edd1159"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:d5e975ca70269d66d17dd995dafc06f1b06e8cb1ec1e9ed54c1d1e4a7c4cf26e"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b2813dc3de8c1ee3f924e4d4227999285fd335d1bcc0d2be6dc3f1f6a318ec1"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e347b3bfcf985a05e8c0b7d462ba6f15b1ee1c909e2dcad795e49e91b152c383"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e8f8c9cb53cdac7ba9793c276acd90168f416b9ce36799b9b885790f8ad6c0a"}, + {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:62ee94988d6b4722ce0028644418d93a52429e977d742ca2ccbe1c4f4a792511"}, + {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1776fd7f989fc6b8d8c8cb8da1f6b82c5814957264d1f6cf818d475ec2bf6395"}, + {file = "greenlet-3.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:48ca08c771c268a768087b408658e216133aecd835c0ded47ce955381105ba39"}, + {file = "greenlet-3.1.1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:396979749bd95f018296af156201d6211240e7a23090f50a8d5d18c370084dc3"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca9d0ff5ad43e785350894d97e13633a66e2b50000e8a183a50a88d834752d42"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f6ff3b14f2df4c41660a7dec01045a045653998784bf8cfcb5a525bdffffbc8f"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94ebba31df2aa506d7b14866fed00ac141a867e63143fe5bca82a8e503b36437"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73aaad12ac0ff500f62cebed98d8789198ea0e6f233421059fa68a5aa7220145"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63e4844797b975b9af3a3fb8f7866ff08775f5426925e1e0bbcfe7932059a12c"}, + {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7939aa3ca7d2a1593596e7ac6d59391ff30281ef280d8632fa03d81f7c5f955e"}, + {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d0028e725ee18175c6e422797c407874da24381ce0690d6b9396c204c7f7276e"}, + {file = "greenlet-3.1.1-cp39-cp39-win32.whl", hash = "sha256:5e06afd14cbaf9e00899fae69b24a32f2196c19de08fcb9f4779dd4f004e5e7c"}, + {file = "greenlet-3.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:3319aa75e0e0639bc15ff54ca327e8dc7a6fe404003496e3c6925cd3142e0e22"}, + {file = "greenlet-3.1.1.tar.gz", hash = "sha256:4ce3ac6cdb6adf7946475d7ef31777c26d94bccc377e070a7986bd2d5c515467"}, ] [[package]] @@ -604,7 +605,7 @@ files = [ [[package]] name = "httpcore" -version = "1.0.5" +version = "1.0.7" requires_python = ">=3.8" summary = "A minimal low-level HTTP client." 
groups = ["default"] @@ -613,13 +614,13 @@ dependencies = [ "h11<0.15,>=0.13", ] files = [ - {file = "httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5"}, - {file = "httpcore-1.0.5.tar.gz", hash = "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61"}, + {file = "httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd"}, + {file = "httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c"}, ] [[package]] name = "httpx" -version = "0.27.0" +version = "0.28.1" requires_python = ">=3.8" summary = "The next generation HTTP client." groups = ["default"] @@ -628,68 +629,67 @@ dependencies = [ "certifi", "httpcore==1.*", "idna", - "sniffio", ] files = [ - {file = "httpx-0.27.0-py3-none-any.whl", hash = "sha256:71d5465162c13681bff01ad59b2cc68dd838ea1f10e51574bac27103f00c91a5"}, - {file = "httpx-0.27.0.tar.gz", hash = "sha256:a0cb88a46f32dc874e04ee956e4c2764aba2aa228f650b06788ba6bda2962ab5"}, + {file = "httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad"}, + {file = "httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc"}, ] [[package]] name = "idna" -version = "3.7" -requires_python = ">=3.5" +version = "3.10" +requires_python = ">=3.6" summary = "Internationalized Domain Names in Applications (IDNA)" groups = ["default"] files = [ - {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, - {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, ] [[package]] name = "jiter" -version = "0.5.0" +version = "0.8.2" requires_python = ">=3.8" summary = "Fast iterable JSON parser." 
groups = ["default"] files = [ - {file = "jiter-0.5.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:b599f4e89b3def9a94091e6ee52e1d7ad7bc33e238ebb9c4c63f211d74822c3f"}, - {file = "jiter-0.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2a063f71c4b06225543dddadbe09d203dc0c95ba352d8b85f1221173480a71d5"}, - {file = "jiter-0.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:acc0d5b8b3dd12e91dd184b87273f864b363dfabc90ef29a1092d269f18c7e28"}, - {file = "jiter-0.5.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c22541f0b672f4d741382a97c65609332a783501551445ab2df137ada01e019e"}, - {file = "jiter-0.5.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:63314832e302cc10d8dfbda0333a384bf4bcfce80d65fe99b0f3c0da8945a91a"}, - {file = "jiter-0.5.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a25fbd8a5a58061e433d6fae6d5298777c0814a8bcefa1e5ecfff20c594bd749"}, - {file = "jiter-0.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:503b2c27d87dfff5ab717a8200fbbcf4714516c9d85558048b1fc14d2de7d8dc"}, - {file = "jiter-0.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6d1f3d27cce923713933a844872d213d244e09b53ec99b7a7fdf73d543529d6d"}, - {file = "jiter-0.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c95980207b3998f2c3b3098f357994d3fd7661121f30669ca7cb945f09510a87"}, - {file = "jiter-0.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:afa66939d834b0ce063f57d9895e8036ffc41c4bd90e4a99631e5f261d9b518e"}, - {file = "jiter-0.5.0-cp310-none-win32.whl", hash = "sha256:f16ca8f10e62f25fd81d5310e852df6649af17824146ca74647a018424ddeccf"}, - {file = "jiter-0.5.0-cp310-none-win_amd64.whl", hash = "sha256:b2950e4798e82dd9176935ef6a55cf6a448b5c71515a556da3f6b811a7844f1e"}, - {file = "jiter-0.5.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d4c8e1ed0ef31ad29cae5ea16b9e41529eb50a7fba70600008e9f8de6376d553"}, - {file = "jiter-0.5.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c6f16e21276074a12d8421692515b3fd6d2ea9c94fd0734c39a12960a20e85f3"}, - {file = "jiter-0.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5280e68e7740c8c128d3ae5ab63335ce6d1fb6603d3b809637b11713487af9e6"}, - {file = "jiter-0.5.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:583c57fc30cc1fec360e66323aadd7fc3edeec01289bfafc35d3b9dcb29495e4"}, - {file = "jiter-0.5.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:26351cc14507bdf466b5f99aba3df3143a59da75799bf64a53a3ad3155ecded9"}, - {file = "jiter-0.5.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4829df14d656b3fb87e50ae8b48253a8851c707da9f30d45aacab2aa2ba2d614"}, - {file = "jiter-0.5.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a42a4bdcf7307b86cb863b2fb9bb55029b422d8f86276a50487982d99eed7c6e"}, - {file = "jiter-0.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04d461ad0aebf696f8da13c99bc1b3e06f66ecf6cfd56254cc402f6385231c06"}, - {file = "jiter-0.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e6375923c5f19888c9226582a124b77b622f8fd0018b843c45eeb19d9701c403"}, - {file = "jiter-0.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2cec323a853c24fd0472517113768c92ae0be8f8c384ef4441d3632da8baa646"}, - {file = "jiter-0.5.0-cp311-none-win32.whl", hash = 
"sha256:aa1db0967130b5cab63dfe4d6ff547c88b2a394c3410db64744d491df7f069bb"}, - {file = "jiter-0.5.0-cp311-none-win_amd64.whl", hash = "sha256:aa9d2b85b2ed7dc7697597dcfaac66e63c1b3028652f751c81c65a9f220899ae"}, - {file = "jiter-0.5.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:0af3838cfb7e6afee3f00dc66fa24695199e20ba87df26e942820345b0afc566"}, - {file = "jiter-0.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:550b11d669600dbc342364fd4adbe987f14d0bbedaf06feb1b983383dcc4b961"}, - {file = "jiter-0.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:489875bf1a0ffb3cb38a727b01e6673f0f2e395b2aad3c9387f94187cb214bbf"}, - {file = "jiter-0.5.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b250ca2594f5599ca82ba7e68785a669b352156260c5362ea1b4e04a0f3e2389"}, - {file = "jiter-0.5.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8ea18e01f785c6667ca15407cd6dabbe029d77474d53595a189bdc813347218e"}, - {file = "jiter-0.5.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:462a52be85b53cd9bffd94e2d788a09984274fe6cebb893d6287e1c296d50653"}, - {file = "jiter-0.5.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:92cc68b48d50fa472c79c93965e19bd48f40f207cb557a8346daa020d6ba973b"}, - {file = "jiter-0.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1c834133e59a8521bc87ebcad773608c6fa6ab5c7a022df24a45030826cf10bc"}, - {file = "jiter-0.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab3a71ff31cf2d45cb216dc37af522d335211f3a972d2fe14ea99073de6cb104"}, - {file = "jiter-0.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:cccd3af9c48ac500c95e1bcbc498020c87e1781ff0345dd371462d67b76643eb"}, - {file = "jiter-0.5.0-cp39-none-win32.whl", hash = "sha256:368084d8d5c4fc40ff7c3cc513c4f73e02c85f6009217922d0823a48ee7adf61"}, - {file = "jiter-0.5.0-cp39-none-win_amd64.whl", hash = "sha256:ce03f7b4129eb72f1687fa11300fbf677b02990618428934662406d2a76742a1"}, - {file = "jiter-0.5.0.tar.gz", hash = "sha256:1d916ba875bcab5c5f7d927df998c4cb694d27dceddf3392e58beaf10563368a"}, + {file = "jiter-0.8.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:ca8577f6a413abe29b079bc30f907894d7eb07a865c4df69475e868d73e71c7b"}, + {file = "jiter-0.8.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b25bd626bde7fb51534190c7e3cb97cee89ee76b76d7585580e22f34f5e3f393"}, + {file = "jiter-0.8.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5c826a221851a8dc028eb6d7d6429ba03184fa3c7e83ae01cd6d3bd1d4bd17d"}, + {file = "jiter-0.8.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d35c864c2dff13dfd79fb070fc4fc6235d7b9b359efe340e1261deb21b9fcb66"}, + {file = "jiter-0.8.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f557c55bc2b7676e74d39d19bcb8775ca295c7a028246175d6a8b431e70835e5"}, + {file = "jiter-0.8.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:580ccf358539153db147e40751a0b41688a5ceb275e6f3e93d91c9467f42b2e3"}, + {file = "jiter-0.8.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af102d3372e917cffce49b521e4c32c497515119dc7bd8a75665e90a718bbf08"}, + {file = "jiter-0.8.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cadcc978f82397d515bb2683fc0d50103acff2a180552654bb92d6045dec2c49"}, + {file = "jiter-0.8.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:ba5bdf56969cad2019d4e8ffd3f879b5fdc792624129741d3d83fc832fef8c7d"}, + {file = "jiter-0.8.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3b94a33a241bee9e34b8481cdcaa3d5c2116f575e0226e421bed3f7a6ea71cff"}, + {file = "jiter-0.8.2-cp310-cp310-win32.whl", hash = "sha256:6e5337bf454abddd91bd048ce0dca5134056fc99ca0205258766db35d0a2ea43"}, + {file = "jiter-0.8.2-cp310-cp310-win_amd64.whl", hash = "sha256:4a9220497ca0cb1fe94e3f334f65b9b5102a0b8147646118f020d8ce1de70105"}, + {file = "jiter-0.8.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:2dd61c5afc88a4fda7d8b2cf03ae5947c6ac7516d32b7a15bf4b49569a5c076b"}, + {file = "jiter-0.8.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a6c710d657c8d1d2adbbb5c0b0c6bfcec28fd35bd6b5f016395f9ac43e878a15"}, + {file = "jiter-0.8.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9584de0cd306072635fe4b89742bf26feae858a0683b399ad0c2509011b9dc0"}, + {file = "jiter-0.8.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5a90a923338531b7970abb063cfc087eebae6ef8ec8139762007188f6bc69a9f"}, + {file = "jiter-0.8.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d21974d246ed0181558087cd9f76e84e8321091ebfb3a93d4c341479a736f099"}, + {file = "jiter-0.8.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:32475a42b2ea7b344069dc1e81445cfc00b9d0e3ca837f0523072432332e9f74"}, + {file = "jiter-0.8.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b9931fd36ee513c26b5bf08c940b0ac875de175341cbdd4fa3be109f0492586"}, + {file = "jiter-0.8.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ce0820f4a3a59ddced7fce696d86a096d5cc48d32a4183483a17671a61edfddc"}, + {file = "jiter-0.8.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8ffc86ae5e3e6a93765d49d1ab47b6075a9c978a2b3b80f0f32628f39caa0c88"}, + {file = "jiter-0.8.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5127dc1abd809431172bc3fbe8168d6b90556a30bb10acd5ded41c3cfd6f43b6"}, + {file = "jiter-0.8.2-cp311-cp311-win32.whl", hash = "sha256:66227a2c7b575720c1871c8800d3a0122bb8ee94edb43a5685aa9aceb2782d44"}, + {file = "jiter-0.8.2-cp311-cp311-win_amd64.whl", hash = "sha256:cde031d8413842a1e7501e9129b8e676e62a657f8ec8166e18a70d94d4682855"}, + {file = "jiter-0.8.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:e41e75344acef3fc59ba4765df29f107f309ca9e8eace5baacabd9217e52a5ee"}, + {file = "jiter-0.8.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7f22b16b35d5c1df9dfd58843ab2cd25e6bf15191f5a236bed177afade507bfc"}, + {file = "jiter-0.8.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7200b8f7619d36aa51c803fd52020a2dfbea36ffec1b5e22cab11fd34d95a6d"}, + {file = "jiter-0.8.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:70bf4c43652cc294040dbb62256c83c8718370c8b93dd93d934b9a7bf6c4f53c"}, + {file = "jiter-0.8.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f9d471356dc16f84ed48768b8ee79f29514295c7295cb41e1133ec0b2b8d637d"}, + {file = "jiter-0.8.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:859e8eb3507894093d01929e12e267f83b1d5f6221099d3ec976f0c995cb6bd9"}, + {file = "jiter-0.8.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaa58399c01db555346647a907b4ef6d4f584b123943be6ed5588c3f2359c9f4"}, + {file = "jiter-0.8.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:8f2d5ed877f089862f4c7aacf3a542627c1496f972a34d0474ce85ee7d939c27"}, + {file = "jiter-0.8.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:03c9df035d4f8d647f8c210ddc2ae0728387275340668fb30d2421e17d9a0841"}, + {file = "jiter-0.8.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8bd2a824d08d8977bb2794ea2682f898ad3d8837932e3a74937e93d62ecbb637"}, + {file = "jiter-0.8.2-cp39-cp39-win32.whl", hash = "sha256:ca29b6371ebc40e496995c94b988a101b9fbbed48a51190a4461fcb0a68b4a36"}, + {file = "jiter-0.8.2-cp39-cp39-win_amd64.whl", hash = "sha256:1c0dfbd1be3cbefc7510102370d86e35d1d53e5a93d48519688b1bf0f761160a"}, + {file = "jiter-0.8.2.tar.gz", hash = "sha256:cd73d3e740666d0e639f678adb176fad25c1bcbdae88d8d7b857e1783bb4212d"}, ] [[package]] @@ -732,7 +732,7 @@ files = [ [[package]] name = "llama-cloud" -version = "0.0.13" +version = "0.1.6" requires_python = "<4,>=3.8" summary = "" groups = ["default"] @@ -741,8 +741,8 @@ dependencies = [ "pydantic>=1.10", ] files = [ - {file = "llama_cloud-0.0.13-py3-none-any.whl", hash = "sha256:b641450308b80c85eeae7ef9cb5a3b4a3b1823d5cde05b626ce33f7494ec6229"}, - {file = "llama_cloud-0.0.13.tar.gz", hash = "sha256:0e3165a22f8df34a00d13f1f5739438ba4d620f2d8a9289df830078a39fe6f1f"}, + {file = "llama_cloud-0.1.6-py3-none-any.whl", hash = "sha256:43595081e03ff552fd18d9553fcaada897ff267456c0f89f4cb098b927dc4dc7"}, + {file = "llama_cloud-0.1.6.tar.gz", hash = "sha256:21200f6fdd46e08455d34b136f645ce6b8c3800e0ae13d8077913171a921da5a"}, ] [[package]] @@ -868,8 +868,8 @@ files = [ [[package]] name = "llama-index-legacy" -version = "0.9.48" -requires_python = ">=3.8.1,<4.0" +version = "0.9.48.post4" +requires_python = "<4.0,>=3.8.1" summary = "Interface between LLMs and your data" groups = ["default"] dependencies = [ @@ -882,7 +882,7 @@ dependencies = [ "httpx", "nest-asyncio<2.0.0,>=1.5.8", "networkx>=3.0", - "nltk<4.0.0,>=3.8.1", + "nltk>=3.8.1", "numpy", "openai>=1.1.0", "pandas", @@ -893,8 +893,8 @@ dependencies = [ "typing-inspect>=0.8.0", ] files = [ - {file = "llama_index_legacy-0.9.48-py3-none-any.whl", hash = "sha256:714ada95beac179b4acefa4d2deff74bb7b2f22b0f699ac247d4cb67738d16d4"}, - {file = "llama_index_legacy-0.9.48.tar.gz", hash = "sha256:82ddc4691edbf49533d65582c249ba22c03fe96fbd3e92f7758dccef28e43834"}, + {file = "llama_index_legacy-0.9.48.post4-py3-none-any.whl", hash = "sha256:4b817d7c343fb5f7f00c4410eff519f320013b8d5f24c4fedcf270c471f92038"}, + {file = "llama_index_legacy-0.9.48.post4.tar.gz", hash = "sha256:f8a9764e7e134a52bfef5e53d2d62561bfc01fc09874c51cc001df6f5302ae30"}, ] [[package]] @@ -916,7 +916,7 @@ files = [ [[package]] name = "llama-index-llms-openai" -version = "0.1.29" +version = "0.1.31" requires_python = "<4.0,>=3.8.1" summary = "llama-index llms openai integration" groups = ["default"] @@ -925,8 +925,8 @@ dependencies = [ "openai<2.0.0,>=1.40.0", ] files = [ - {file = "llama_index_llms_openai-0.1.29-py3-none-any.whl", hash = "sha256:8ae9a9f595b3654405fd54f3dbc8b58b259be8eeea2f58650609869e8362cab5"}, - {file = "llama_index_llms_openai-0.1.29.tar.gz", hash = "sha256:15a4fa65a3d2ecf7e29a090273ec595d44553baea72a1ebe5b42fe3c527f7121"}, + {file = "llama_index_llms_openai-0.1.31-py3-none-any.whl", hash = "sha256:800815b1b964b7d8dddd0e02a09fb57ac5f2ec6f80db92cd704dae718846023f"}, + {file = "llama_index_llms_openai-0.1.31.tar.gz", hash = "sha256:c235493f453b92903722054a8dfb1452ea850eac47a68a38bab3b823988d56fe"}, ] [[package]] @@ -1024,32 +1024,32 @@ files = [ [[package]] name = "marshmallow" -version = "3.21.3" -requires_python = 
">=3.8" +version = "3.23.1" +requires_python = ">=3.9" summary = "A lightweight library for converting complex datatypes to and from native Python datatypes." groups = ["default"] dependencies = [ "packaging>=17.0", ] files = [ - {file = "marshmallow-3.21.3-py3-none-any.whl", hash = "sha256:86ce7fb914aa865001a4b2092c4c2872d13bc347f3d42673272cabfdbad386f1"}, - {file = "marshmallow-3.21.3.tar.gz", hash = "sha256:4f57c5e050a54d66361e826f94fba213eb10b67b2fdb02c3e0343ce207ba1662"}, + {file = "marshmallow-3.23.1-py3-none-any.whl", hash = "sha256:fece2eb2c941180ea1b7fcbd4a83c51bfdd50093fdd3ad2585ee5e1df2508491"}, + {file = "marshmallow-3.23.1.tar.gz", hash = "sha256:3a8dfda6edd8dcdbf216c0ede1d1e78d230a6dc9c5a088f58c4083b974a0d468"}, ] [[package]] name = "msal" -version = "1.30.0" +version = "1.31.1" requires_python = ">=3.7" summary = "The Microsoft Authentication Library (MSAL) for Python library enables your app to access the Microsoft Cloud by supporting authentication of users with Microsoft Azure Active Directory accounts (AAD) and Microsoft Accounts (MSA) using industry standard OAuth2 and OpenID Connect." groups = ["default"] dependencies = [ "PyJWT[crypto]<3,>=1.0.0", - "cryptography<45,>=2.5", + "cryptography<46,>=2.5", "requests<3,>=2.0.0", ] files = [ - {file = "msal-1.30.0-py3-none-any.whl", hash = "sha256:423872177410cb61683566dc3932db7a76f661a5d2f6f52f02a047f101e1c1de"}, - {file = "msal-1.30.0.tar.gz", hash = "sha256:b4bf00850092e465157d814efa24a18f788284c9a479491024d62903085ea2fb"}, + {file = "msal-1.31.1-py3-none-any.whl", hash = "sha256:29d9882de247e96db01386496d59f29035e5e841bcac892e6d7bf4390bf6bd17"}, + {file = "msal-1.31.1.tar.gz", hash = "sha256:11b5e6a3f802ffd3a72107203e20c4eac6ef53401961b880af2835b723d80578"}, ] [[package]] @@ -1069,58 +1069,61 @@ files = [ [[package]] name = "multidict" -version = "6.0.5" -requires_python = ">=3.7" +version = "6.1.0" +requires_python = ">=3.8" summary = "multidict implementation" groups = ["default"] -files = [ - {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b644ae063c10e7f324ab1ab6b548bdf6f8b47f3ec234fef1093bc2735e5f9"}, - {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:896ebdcf62683551312c30e20614305f53125750803b614e9e6ce74a96232604"}, - {file = "multidict-6.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:411bf8515f3be9813d06004cac41ccf7d1cd46dfe233705933dd163b60e37600"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d147090048129ce3c453f0292e7697d333db95e52616b3793922945804a433c"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:215ed703caf15f578dca76ee6f6b21b7603791ae090fbf1ef9d865571039ade5"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c6390cf87ff6234643428991b7359b5f59cc15155695deb4eda5c777d2b880f"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fd81c4ebdb4f214161be351eb5bcf385426bf023041da2fd9e60681f3cebae"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cc2ad10255f903656017363cd59436f2111443a76f996584d1077e43ee51182"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6939c95381e003f54cd4c5516740faba40cf5ad3eeff460c3ad1d3e0ea2549bf"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = 
"sha256:220dd781e3f7af2c2c1053da9fa96d9cf3072ca58f057f4c5adaaa1cab8fc442"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:766c8f7511df26d9f11cd3a8be623e59cca73d44643abab3f8c8c07620524e4a"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fe5d7785250541f7f5019ab9cba2c71169dc7d74d0f45253f8313f436458a4ef"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c1c1496e73051918fcd4f58ff2e0f2f3066d1c76a0c6aeffd9b45d53243702cc"}, - {file = "multidict-6.0.5-cp310-cp310-win32.whl", hash = "sha256:7afcdd1fc07befad18ec4523a782cde4e93e0a2bf71239894b8d61ee578c1319"}, - {file = "multidict-6.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:99f60d34c048c5c2fabc766108c103612344c46e35d4ed9ae0673d33c8fb26e8"}, - {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f285e862d2f153a70586579c15c44656f888806ed0e5b56b64489afe4a2dbfba"}, - {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:53689bb4e102200a4fafa9de9c7c3c212ab40a7ab2c8e474491914d2305f187e"}, - {file = "multidict-6.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:612d1156111ae11d14afaf3a0669ebf6c170dbb735e510a7438ffe2369a847fd"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7be7047bd08accdb7487737631d25735c9a04327911de89ff1b26b81745bd4e3"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de170c7b4fe6859beb8926e84f7d7d6c693dfe8e27372ce3b76f01c46e489fcf"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04bde7a7b3de05732a4eb39c94574db1ec99abb56162d6c520ad26f83267de29"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85f67aed7bb647f93e7520633d8f51d3cbc6ab96957c71272b286b2f30dc70ed"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425bf820055005bfc8aa9a0b99ccb52cc2f4070153e34b701acc98d201693733"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d3eb1ceec286eba8220c26f3b0096cf189aea7057b6e7b7a2e60ed36b373b77f"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7901c05ead4b3fb75113fb1dd33eb1253c6d3ee37ce93305acd9d38e0b5f21a4"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e0e79d91e71b9867c73323a3444724d496c037e578a0e1755ae159ba14f4f3d1"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:29bfeb0dff5cb5fdab2023a7a9947b3b4af63e9c47cae2a10ad58394b517fddc"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e030047e85cbcedbfc073f71836d62dd5dadfbe7531cae27789ff66bc551bd5e"}, - {file = "multidict-6.0.5-cp311-cp311-win32.whl", hash = "sha256:2f4848aa3baa109e6ab81fe2006c77ed4d3cd1e0ac2c1fbddb7b1277c168788c"}, - {file = "multidict-6.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:2faa5ae9376faba05f630d7e5e6be05be22913782b927b19d12b8145968a85ea"}, - {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e7be68734bd8c9a513f2b0cfd508802d6609da068f40dc57d4e3494cefc92929"}, - {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1d9ea7a7e779d7a3561aade7d596649fbecfa5c08a7674b11b423783217933f9"}, - {file = "multidict-6.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:ea1456df2a27c73ce51120fa2f519f1bea2f4a03a917f4a43c8707cf4cbbae1a"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf590b134eb70629e350691ecca88eac3e3b8b3c86992042fb82e3cb1830d5e1"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5c0631926c4f58e9a5ccce555ad7747d9a9f8b10619621f22f9635f069f6233e"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dce1c6912ab9ff5f179eaf6efe7365c1f425ed690b03341911bf4939ef2f3046"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0868d64af83169e4d4152ec612637a543f7a336e4a307b119e98042e852ad9c"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:141b43360bfd3bdd75f15ed811850763555a251e38b2405967f8e25fb43f7d40"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7df704ca8cf4a073334e0427ae2345323613e4df18cc224f647f251e5e75a527"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6214c5a5571802c33f80e6c84713b2c79e024995b9c5897f794b43e714daeec9"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:cd6c8fca38178e12c00418de737aef1261576bd1b6e8c6134d3e729a4e858b38"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e02021f87a5b6932fa6ce916ca004c4d441509d33bbdbeca70d05dff5e9d2479"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ebd8d160f91a764652d3e51ce0d2956b38efe37c9231cd82cfc0bed2e40b581c"}, - {file = "multidict-6.0.5-cp39-cp39-win32.whl", hash = "sha256:04da1bb8c8dbadf2a18a452639771951c662c5ad03aefe4884775454be322c9b"}, - {file = "multidict-6.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:d6f6d4f185481c9669b9447bf9d9cf3b95a0e9df9d169bbc17e363b7d5487755"}, - {file = "multidict-6.0.5-py3-none-any.whl", hash = "sha256:0d63c74e3d7ab26de115c49bffc92cc77ed23395303d496eae515d4204a625e7"}, - {file = "multidict-6.0.5.tar.gz", hash = "sha256:f7e301075edaf50500f0b341543c41194d8df3ae5caf4702f2095f3ca73dd8da"}, +dependencies = [ + "typing-extensions>=4.1.0; python_version < \"3.11\"", +] +files = [ + {file = "multidict-6.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3380252550e372e8511d49481bd836264c009adb826b23fefcc5dd3c69692f60"}, + {file = "multidict-6.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:99f826cbf970077383d7de805c0681799491cb939c25450b9b5b3ced03ca99f1"}, + {file = "multidict-6.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a114d03b938376557927ab23f1e950827c3b893ccb94b62fd95d430fd0e5cf53"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1c416351ee6271b2f49b56ad7f308072f6f44b37118d69c2cad94f3fa8a40d5"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6b5d83030255983181005e6cfbac1617ce9746b219bc2aad52201ad121226581"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3e97b5e938051226dc025ec80980c285b053ffb1e25a3db2a3aa3bc046bf7f56"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d618649d4e70ac6efcbba75be98b26ef5078faad23592f9b51ca492953012429"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:10524ebd769727ac77ef2278390fb0068d83f3acb7773792a5080f2b0abf7748"}, + {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ff3827aef427c89a25cc96ded1759271a93603aba9fb977a6d264648ebf989db"}, + {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:06809f4f0f7ab7ea2cabf9caca7d79c22c0758b58a71f9d32943ae13c7ace056"}, + {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:f179dee3b863ab1c59580ff60f9d99f632f34ccb38bf67a33ec6b3ecadd0fd76"}, + {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:aaed8b0562be4a0876ee3b6946f6869b7bcdb571a5d1496683505944e268b160"}, + {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3c8b88a2ccf5493b6c8da9076fb151ba106960a2df90c2633f342f120751a9e7"}, + {file = "multidict-6.1.0-cp310-cp310-win32.whl", hash = "sha256:4a9cb68166a34117d6646c0023c7b759bf197bee5ad4272f420a0141d7eb03a0"}, + {file = "multidict-6.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:20b9b5fbe0b88d0bdef2012ef7dee867f874b72528cf1d08f1d59b0e3850129d"}, + {file = "multidict-6.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3efe2c2cb5763f2f1b275ad2bf7a287d3f7ebbef35648a9726e3b69284a4f3d6"}, + {file = "multidict-6.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c7053d3b0353a8b9de430a4f4b4268ac9a4fb3481af37dfe49825bf45ca24156"}, + {file = "multidict-6.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:27e5fc84ccef8dfaabb09d82b7d179c7cf1a3fbc8a966f8274fcb4ab2eb4cadb"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e2b90b43e696f25c62656389d32236e049568b39320e2735d51f08fd362761b"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d83a047959d38a7ff552ff94be767b7fd79b831ad1cd9920662db05fec24fe72"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d1a9dd711d0877a1ece3d2e4fea11a8e75741ca21954c919406b44e7cf971304"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec2abea24d98246b94913b76a125e855eb5c434f7c46546046372fe60f666351"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4867cafcbc6585e4b678876c489b9273b13e9fff9f6d6d66add5e15d11d926cb"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5b48204e8d955c47c55b72779802b219a39acc3ee3d0116d5080c388970b76e3"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:d8fff389528cad1618fb4b26b95550327495462cd745d879a8c7c2115248e399"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:a7a9541cd308eed5e30318430a9c74d2132e9a8cb46b901326272d780bf2d423"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:da1758c76f50c39a2efd5e9859ce7d776317eb1dd34317c8152ac9251fc574a3"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c943a53e9186688b45b323602298ab727d8865d8c9ee0b17f8d62d14b56f0753"}, + {file = "multidict-6.1.0-cp311-cp311-win32.whl", hash = "sha256:90f8717cb649eea3504091e640a1b8568faad18bd4b9fcd692853a04475a4b80"}, + {file = "multidict-6.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:82176036e65644a6cc5bd619f65f6f19781e8ec2e5330f51aa9ada7504cc1926"}, + {file = "multidict-6.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:4e18b656c5e844539d506a0a06432274d7bd52a7487e6828c63a63d69185626c"}, + {file = "multidict-6.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a185f876e69897a6f3325c3f19f26a297fa058c5e456bfcff8015e9a27e83ae1"}, + {file = "multidict-6.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ab7c4ceb38d91570a650dba194e1ca87c2b543488fe9309b4212694174fd539c"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e617fb6b0b6953fffd762669610c1c4ffd05632c138d61ac7e14ad187870669c"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:16e5f4bf4e603eb1fdd5d8180f1a25f30056f22e55ce51fb3d6ad4ab29f7d96f"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4c035da3f544b1882bac24115f3e2e8760f10a0107614fc9839fd232200b875"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:957cf8e4b6e123a9eea554fa7ebc85674674b713551de587eb318a2df3e00255"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:483a6aea59cb89904e1ceabd2b47368b5600fb7de78a6e4a2c2987b2d256cf30"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:87701f25a2352e5bf7454caa64757642734da9f6b11384c1f9d1a8e699758057"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:682b987361e5fd7a139ed565e30d81fd81e9629acc7d925a205366877d8c8657"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ce2186a7df133a9c895dea3331ddc5ddad42cdd0d1ea2f0a51e5d161e4762f28"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:9f636b730f7e8cb19feb87094949ba54ee5357440b9658b2a32a5ce4bce53972"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:73eae06aa53af2ea5270cc066dcaf02cc60d2994bbb2c4ef5764949257d10f43"}, + {file = "multidict-6.1.0-cp39-cp39-win32.whl", hash = "sha256:1ca0083e80e791cffc6efce7660ad24af66c8d4079d2a750b29001b53ff59ada"}, + {file = "multidict-6.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:aa466da5b15ccea564bdab9c89175c762bc12825f4659c11227f515cee76fa4a"}, + {file = "multidict-6.1.0-py3-none-any.whl", hash = "sha256:48e171e52d1c4d33888e529b999e5900356b9ae588c2f09a52dcefb158b27506"}, + {file = "multidict-6.1.0.tar.gz", hash = "sha256:22ae2ebf9b0c69d206c003e2f6a914ea33f0a932d4aa16f236afc049d9958f4a"}, ] [[package]] @@ -1158,7 +1161,7 @@ files = [ [[package]] name = "nltk" -version = "3.8.2" +version = "3.9.1" requires_python = ">=3.8" summary = "Natural Language Toolkit" groups = ["default"] @@ -1169,8 +1172,8 @@ dependencies = [ "tqdm", ] files = [ - {file = "nltk-3.8.2-py3-none-any.whl", hash = "sha256:bae044ae22ebe0b694a87c0012233373209f27d5c76d3572599c842740a62fe0"}, - {file = "nltk-3.8.2.tar.gz", hash = "sha256:9c051aa981c6745894906d5c3aad27417f3d1c10d91eefca50382fc922966f31"}, + {file = "nltk-3.9.1-py3-none-any.whl", hash = "sha256:4fa26829c5b00715afe3061398a8989dc643b92ce7dd93fb4585a70930d168a1"}, + {file = "nltk-3.9.1.tar.gz", hash = "sha256:87d127bd3de4bd89a4f81265e5fa59cb1b199b27440175370f7417d2bc7ae868"}, ] [[package]] @@ -1212,8 +1215,8 @@ files = [ [[package]] name = "openai" -version = "1.40.3" -requires_python = ">=3.7.1" +version = "1.58.1" +requires_python = ">=3.8" summary = "The official Python library for the openai API" groups = ["default"] dependencies = [ @@ -1227,24 +1230,24 @@ dependencies = [ 
"typing-extensions<5,>=4.11", ] files = [ - {file = "openai-1.40.3-py3-none-any.whl", hash = "sha256:09396cb6e2e15c921a5d872bf92841a60a9425da10dcd962b45fe7c4f48f8395"}, - {file = "openai-1.40.3.tar.gz", hash = "sha256:f2ffe907618240938c59d7ccc67dd01dc8c50be203c0077240db6758d2f02480"}, + {file = "openai-1.58.1-py3-none-any.whl", hash = "sha256:e2910b1170a6b7f88ef491ac3a42c387f08bd3db533411f7ee391d166571d63c"}, + {file = "openai-1.58.1.tar.gz", hash = "sha256:f5a035fd01e141fc743f4b0e02c41ca49be8fab0866d3b67f5f29b4f4d3c0973"}, ] [[package]] name = "packaging" -version = "24.1" +version = "24.2" requires_python = ">=3.8" summary = "Core utilities for Python packages" groups = ["default"] files = [ - {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, - {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, + {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, + {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, ] [[package]] name = "pandas" -version = "2.2.2" +version = "2.2.3" requires_python = ">=3.9" summary = "Powerful data structures for data analysis, time series, and statistics" groups = ["default"] @@ -1256,85 +1259,82 @@ dependencies = [ "tzdata>=2022.7", ] files = [ - {file = "pandas-2.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90c6fca2acf139569e74e8781709dccb6fe25940488755716d1d354d6bc58bce"}, - {file = "pandas-2.2.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7adfc142dac335d8c1e0dcbd37eb8617eac386596eb9e1a1b77791cf2498238"}, - {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4abfe0be0d7221be4f12552995e58723c7422c80a659da13ca382697de830c08"}, - {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8635c16bf3d99040fdf3ca3db669a7250ddf49c55dc4aa8fe0ae0fa8d6dcc1f0"}, - {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:40ae1dffb3967a52203105a077415a86044a2bea011b5f321c6aa64b379a3f51"}, - {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8e5a0b00e1e56a842f922e7fae8ae4077aee4af0acb5ae3622bd4b4c30aedf99"}, - {file = "pandas-2.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:ddf818e4e6c7c6f4f7c8a12709696d193976b591cc7dc50588d3d1a6b5dc8772"}, - {file = "pandas-2.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:696039430f7a562b74fa45f540aca068ea85fa34c244d0deee539cb6d70aa288"}, - {file = "pandas-2.2.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8e90497254aacacbc4ea6ae5e7a8cd75629d6ad2b30025a4a8b09aa4faf55151"}, - {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58b84b91b0b9f4bafac2a0ac55002280c094dfc6402402332c0913a59654ab2b"}, - {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2123dc9ad6a814bcdea0f099885276b31b24f7edf40f6cdbc0912672e22eee"}, - {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2925720037f06e89af896c70bca73459d7e6a4be96f9de79e2d440bd499fe0db"}, - {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0cace394b6ea70c01ca1595f839cf193df35d1575986e484ad35c4aeae7266c1"}, - {file = "pandas-2.2.2-cp311-cp311-win_amd64.whl", hash = 
"sha256:873d13d177501a28b2756375d59816c365e42ed8417b41665f346289adc68d24"}, - {file = "pandas-2.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0ca6377b8fca51815f382bd0b697a0814c8bda55115678cbc94c30aacbb6eff2"}, - {file = "pandas-2.2.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9057e6aa78a584bc93a13f0a9bf7e753a5e9770a30b4d758b8d5f2a62a9433cd"}, - {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:001910ad31abc7bf06f49dcc903755d2f7f3a9186c0c040b827e522e9cef0863"}, - {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66b479b0bd07204e37583c191535505410daa8df638fd8e75ae1b383851fe921"}, - {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a77e9d1c386196879aa5eb712e77461aaee433e54c68cf253053a73b7e49c33a"}, - {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:92fd6b027924a7e178ac202cfbe25e53368db90d56872d20ffae94b96c7acc57"}, - {file = "pandas-2.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:640cef9aa381b60e296db324337a554aeeb883ead99dc8f6c18e81a93942f5f4"}, - {file = "pandas-2.2.2.tar.gz", hash = "sha256:9e79019aba43cb4fda9e4d983f8e88ca0373adbb697ae9c6c43093218de28b54"}, + {file = "pandas-2.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1948ddde24197a0f7add2bdc4ca83bf2b1ef84a1bc8ccffd95eda17fd836ecb5"}, + {file = "pandas-2.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:381175499d3802cde0eabbaf6324cce0c4f5d52ca6f8c377c29ad442f50f6348"}, + {file = "pandas-2.2.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d9c45366def9a3dd85a6454c0e7908f2b3b8e9c138f5dc38fed7ce720d8453ed"}, + {file = "pandas-2.2.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86976a1c5b25ae3f8ccae3a5306e443569ee3c3faf444dfd0f41cda24667ad57"}, + {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b8661b0238a69d7aafe156b7fa86c44b881387509653fdf857bebc5e4008ad42"}, + {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:37e0aced3e8f539eccf2e099f65cdb9c8aa85109b0be6e93e2baff94264bdc6f"}, + {file = "pandas-2.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:56534ce0746a58afaf7942ba4863e0ef81c9c50d3f0ae93e9497d6a41a057645"}, + {file = "pandas-2.2.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:66108071e1b935240e74525006034333f98bcdb87ea116de573a6a0dccb6c039"}, + {file = "pandas-2.2.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7c2875855b0ff77b2a64a0365e24455d9990730d6431b9e0ee18ad8acee13dbd"}, + {file = "pandas-2.2.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd8d0c3be0515c12fed0bdbae072551c8b54b7192c7b1fda0ba56059a0179698"}, + {file = "pandas-2.2.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c124333816c3a9b03fbeef3a9f230ba9a737e9e5bb4060aa2107a86cc0a497fc"}, + {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:63cc132e40a2e084cf01adf0775b15ac515ba905d7dcca47e9a251819c575ef3"}, + {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:29401dbfa9ad77319367d36940cd8a0b3a11aba16063e39632d98b0e931ddf32"}, + {file = "pandas-2.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:3fc6873a41186404dad67245896a6e440baacc92f5b716ccd1bc9ed2995ab2c5"}, + {file = "pandas-2.2.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc6b93f9b966093cb0fd62ff1a7e4c09e6d546ad7c1de191767baffc57628f39"}, + {file = "pandas-2.2.3-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:5dbca4c1acd72e8eeef4753eeca07de9b1db4f398669d5994086f788a5d7cc30"}, + {file = "pandas-2.2.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8cd6d7cc958a3910f934ea8dbdf17b2364827bb4dafc38ce6eef6bb3d65ff09c"}, + {file = "pandas-2.2.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99df71520d25fade9db7c1076ac94eb994f4d2673ef2aa2e86ee039b6746d20c"}, + {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:31d0ced62d4ea3e231a9f228366919a5ea0b07440d9d4dac345376fd8e1477ea"}, + {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7eee9e7cea6adf3e3d24e304ac6b8300646e2a5d1cd3a3c2abed9101b0846761"}, + {file = "pandas-2.2.3-cp39-cp39-win_amd64.whl", hash = "sha256:4850ba03528b6dd51d6c5d273c46f183f39a9baf3f0143e566b89450965b105e"}, + {file = "pandas-2.2.3.tar.gz", hash = "sha256:4f18ba62b61d7e192368b84517265a99b4d7ee8912f8708660fb4a366cc82667"}, ] [[package]] name = "pillow" -version = "10.4.0" -requires_python = ">=3.8" +version = "11.0.0" +requires_python = ">=3.9" summary = "Python Imaging Library (Fork)" groups = ["default"] files = [ - {file = "pillow-10.4.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:4d9667937cfa347525b319ae34375c37b9ee6b525440f3ef48542fcf66f2731e"}, - {file = "pillow-10.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:543f3dc61c18dafb755773efc89aae60d06b6596a63914107f75459cf984164d"}, - {file = "pillow-10.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7928ecbf1ece13956b95d9cbcfc77137652b02763ba384d9ab508099a2eca856"}, - {file = "pillow-10.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4d49b85c4348ea0b31ea63bc75a9f3857869174e2bf17e7aba02945cd218e6f"}, - {file = "pillow-10.4.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:6c762a5b0997f5659a5ef2266abc1d8851ad7749ad9a6a5506eb23d314e4f46b"}, - {file = "pillow-10.4.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:a985e028fc183bf12a77a8bbf36318db4238a3ded7fa9df1b9a133f1cb79f8fc"}, - {file = "pillow-10.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:812f7342b0eee081eaec84d91423d1b4650bb9828eb53d8511bcef8ce5aecf1e"}, - {file = "pillow-10.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ac1452d2fbe4978c2eec89fb5a23b8387aba707ac72810d9490118817d9c0b46"}, - {file = "pillow-10.4.0-cp310-cp310-win32.whl", hash = "sha256:bcd5e41a859bf2e84fdc42f4edb7d9aba0a13d29a2abadccafad99de3feff984"}, - {file = "pillow-10.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:ecd85a8d3e79cd7158dec1c9e5808e821feea088e2f69a974db5edf84dc53141"}, - {file = "pillow-10.4.0-cp310-cp310-win_arm64.whl", hash = "sha256:ff337c552345e95702c5fde3158acb0625111017d0e5f24bf3acdb9cc16b90d1"}, - {file = "pillow-10.4.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:0a9ec697746f268507404647e531e92889890a087e03681a3606d9b920fbee3c"}, - {file = "pillow-10.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dfe91cb65544a1321e631e696759491ae04a2ea11d36715eca01ce07284738be"}, - {file = "pillow-10.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5dc6761a6efc781e6a1544206f22c80c3af4c8cf461206d46a1e6006e4429ff3"}, - {file = "pillow-10.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e84b6cc6a4a3d76c153a6b19270b3526a5a8ed6b09501d3af891daa2a9de7d6"}, - {file = "pillow-10.4.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:bbc527b519bd3aa9d7f429d152fea69f9ad37c95f0b02aebddff592688998abe"}, - 
{file = "pillow-10.4.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:76a911dfe51a36041f2e756b00f96ed84677cdeb75d25c767f296c1c1eda1319"}, - {file = "pillow-10.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:59291fb29317122398786c2d44427bbd1a6d7ff54017075b22be9d21aa59bd8d"}, - {file = "pillow-10.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:416d3a5d0e8cfe4f27f574362435bc9bae57f679a7158e0096ad2beb427b8696"}, - {file = "pillow-10.4.0-cp311-cp311-win32.whl", hash = "sha256:7086cc1d5eebb91ad24ded9f58bec6c688e9f0ed7eb3dbbf1e4800280a896496"}, - {file = "pillow-10.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cbed61494057c0f83b83eb3a310f0bf774b09513307c434d4366ed64f4128a91"}, - {file = "pillow-10.4.0-cp311-cp311-win_arm64.whl", hash = "sha256:f5f0c3e969c8f12dd2bb7e0b15d5c468b51e5017e01e2e867335c81903046a22"}, - {file = "pillow-10.4.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:0ae24a547e8b711ccaaf99c9ae3cd975470e1a30caa80a6aaee9a2f19c05701d"}, - {file = "pillow-10.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:298478fe4f77a4408895605f3482b6cc6222c018b2ce565c2b6b9c354ac3229b"}, - {file = "pillow-10.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:134ace6dc392116566980ee7436477d844520a26a4b1bd4053f6f47d096997fd"}, - {file = "pillow-10.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:930044bb7679ab003b14023138b50181899da3f25de50e9dbee23b61b4de2126"}, - {file = "pillow-10.4.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:c76e5786951e72ed3686e122d14c5d7012f16c8303a674d18cdcd6d89557fc5b"}, - {file = "pillow-10.4.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:b2724fdb354a868ddf9a880cb84d102da914e99119211ef7ecbdc613b8c96b3c"}, - {file = "pillow-10.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:dbc6ae66518ab3c5847659e9988c3b60dc94ffb48ef9168656e0019a93dbf8a1"}, - {file = "pillow-10.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:06b2f7898047ae93fad74467ec3d28fe84f7831370e3c258afa533f81ef7f3df"}, - {file = "pillow-10.4.0-cp39-cp39-win32.whl", hash = "sha256:7970285ab628a3779aecc35823296a7869f889b8329c16ad5a71e4901a3dc4ef"}, - {file = "pillow-10.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:961a7293b2457b405967af9c77dcaa43cc1a8cd50d23c532e62d48ab6cdd56f5"}, - {file = "pillow-10.4.0-cp39-cp39-win_arm64.whl", hash = "sha256:32cda9e3d601a52baccb2856b8ea1fc213c90b340c542dcef77140dfa3278a9e"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:5b4815f2e65b30f5fbae9dfffa8636d992d49705723fe86a3661806e069352d4"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:8f0aef4ef59694b12cadee839e2ba6afeab89c0f39a3adc02ed51d109117b8da"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f4727572e2918acaa9077c919cbbeb73bd2b3ebcfe033b72f858fc9fbef0026"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff25afb18123cea58a591ea0244b92eb1e61a1fd497bf6d6384f09bc3262ec3e"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:dc3e2db6ba09ffd7d02ae9141cfa0ae23393ee7687248d46a7507b75d610f4f5"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:02a2be69f9c9b8c1e97cf2713e789d4e398c751ecfd9967c18d0ce304efbf885"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-win_amd64.whl", hash = 
"sha256:0755ffd4a0c6f267cccbae2e9903d95477ca2f77c4fcf3a3a09570001856c8a5"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:a02364621fe369e06200d4a16558e056fe2805d3468350df3aef21e00d26214b"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:1b5dea9831a90e9d0721ec417a80d4cbd7022093ac38a568db2dd78363b00908"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b885f89040bb8c4a1573566bbb2f44f5c505ef6e74cec7ab9068c900047f04b"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87dd88ded2e6d74d31e1e0a99a726a6765cda32d00ba72dc37f0651f306daaa8"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:2db98790afc70118bd0255c2eeb465e9767ecf1f3c25f9a1abb8ffc8cfd1fe0a"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f7baece4ce06bade126fb84b8af1c33439a76d8a6fd818970215e0560ca28c27"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:cfdd747216947628af7b259d274771d84db2268ca062dd5faf373639d00113a3"}, - {file = "pillow-10.4.0.tar.gz", hash = "sha256:166c1cd4d24309b30d61f79f4a9114b7b2313d7450912277855ff5dfd7cd4a06"}, + {file = "pillow-11.0.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:6619654954dc4936fcff82db8eb6401d3159ec6be81e33c6000dfd76ae189947"}, + {file = "pillow-11.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b3c5ac4bed7519088103d9450a1107f76308ecf91d6dabc8a33a2fcfb18d0fba"}, + {file = "pillow-11.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a65149d8ada1055029fcb665452b2814fe7d7082fcb0c5bed6db851cb69b2086"}, + {file = "pillow-11.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88a58d8ac0cc0e7f3a014509f0455248a76629ca9b604eca7dc5927cc593c5e9"}, + {file = "pillow-11.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:c26845094b1af3c91852745ae78e3ea47abf3dbcd1cf962f16b9a5fbe3ee8488"}, + {file = "pillow-11.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:1a61b54f87ab5786b8479f81c4b11f4d61702830354520837f8cc791ebba0f5f"}, + {file = "pillow-11.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:674629ff60030d144b7bca2b8330225a9b11c482ed408813924619c6f302fdbb"}, + {file = "pillow-11.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:598b4e238f13276e0008299bd2482003f48158e2b11826862b1eb2ad7c768b97"}, + {file = "pillow-11.0.0-cp310-cp310-win32.whl", hash = "sha256:9a0f748eaa434a41fccf8e1ee7a3eed68af1b690e75328fd7a60af123c193b50"}, + {file = "pillow-11.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:a5629742881bcbc1f42e840af185fd4d83a5edeb96475a575f4da50d6ede337c"}, + {file = "pillow-11.0.0-cp310-cp310-win_arm64.whl", hash = "sha256:ee217c198f2e41f184f3869f3e485557296d505b5195c513b2bfe0062dc537f1"}, + {file = "pillow-11.0.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:1c1d72714f429a521d8d2d018badc42414c3077eb187a59579f28e4270b4b0fc"}, + {file = "pillow-11.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:499c3a1b0d6fc8213519e193796eb1a86a1be4b1877d678b30f83fd979811d1a"}, + {file = "pillow-11.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c8b2351c85d855293a299038e1f89db92a2f35e8d2f783489c6f0b2b5f3fe8a3"}, + {file = "pillow-11.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f4dba50cfa56f910241eb7f883c20f1e7b1d8f7d91c750cd0b318bad443f4d5"}, + {file 
= "pillow-11.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:5ddbfd761ee00c12ee1be86c9c0683ecf5bb14c9772ddbd782085779a63dd55b"}, + {file = "pillow-11.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:45c566eb10b8967d71bf1ab8e4a525e5a93519e29ea071459ce517f6b903d7fa"}, + {file = "pillow-11.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b4fd7bd29610a83a8c9b564d457cf5bd92b4e11e79a4ee4716a63c959699b306"}, + {file = "pillow-11.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:cb929ca942d0ec4fac404cbf520ee6cac37bf35be479b970c4ffadf2b6a1cad9"}, + {file = "pillow-11.0.0-cp311-cp311-win32.whl", hash = "sha256:006bcdd307cc47ba43e924099a038cbf9591062e6c50e570819743f5607404f5"}, + {file = "pillow-11.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:52a2d8323a465f84faaba5236567d212c3668f2ab53e1c74c15583cf507a0291"}, + {file = "pillow-11.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:16095692a253047fe3ec028e951fa4221a1f3ed3d80c397e83541a3037ff67c9"}, + {file = "pillow-11.0.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:2e46773dc9f35a1dd28bd6981332fd7f27bec001a918a72a79b4133cf5291dba"}, + {file = "pillow-11.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2679d2258b7f1192b378e2893a8a0a0ca472234d4c2c0e6bdd3380e8dfa21b6a"}, + {file = "pillow-11.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eda2616eb2313cbb3eebbe51f19362eb434b18e3bb599466a1ffa76a033fb916"}, + {file = "pillow-11.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20ec184af98a121fb2da42642dea8a29ec80fc3efbaefb86d8fdd2606619045d"}, + {file = "pillow-11.0.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:8594f42df584e5b4bb9281799698403f7af489fba84c34d53d1c4bfb71b7c4e7"}, + {file = "pillow-11.0.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:c12b5ae868897c7338519c03049a806af85b9b8c237b7d675b8c5e089e4a618e"}, + {file = "pillow-11.0.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:70fbbdacd1d271b77b7721fe3cdd2d537bbbd75d29e6300c672ec6bb38d9672f"}, + {file = "pillow-11.0.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:5178952973e588b3f1360868847334e9e3bf49d19e169bbbdfaf8398002419ae"}, + {file = "pillow-11.0.0-cp39-cp39-win32.whl", hash = "sha256:8c676b587da5673d3c75bd67dd2a8cdfeb282ca38a30f37950511766b26858c4"}, + {file = "pillow-11.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:94f3e1780abb45062287b4614a5bc0874519c86a777d4a7ad34978e86428b8dd"}, + {file = "pillow-11.0.0-cp39-cp39-win_arm64.whl", hash = "sha256:290f2cc809f9da7d6d622550bbf4c1e57518212da51b6a30fe8e0a270a5b78bd"}, + {file = "pillow-11.0.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:1187739620f2b365de756ce086fdb3604573337cc28a0d3ac4a01ab6b2d2a6d2"}, + {file = "pillow-11.0.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:fbbcb7b57dc9c794843e3d1258c0fbf0f48656d46ffe9e09b63bbd6e8cd5d0a2"}, + {file = "pillow-11.0.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d203af30149ae339ad1b4f710d9844ed8796e97fda23ffbc4cc472968a47d0b"}, + {file = "pillow-11.0.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21a0d3b115009ebb8ac3d2ebec5c2982cc693da935f4ab7bb5c8ebe2f47d36f2"}, + {file = "pillow-11.0.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:73853108f56df97baf2bb8b522f3578221e56f646ba345a372c78326710d3830"}, + {file = "pillow-11.0.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = 
"sha256:e58876c91f97b0952eb766123bfef372792ab3f4e3e1f1a2267834c2ab131734"}, + {file = "pillow-11.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:224aaa38177597bb179f3ec87eeefcce8e4f85e608025e9cfac60de237ba6316"}, + {file = "pillow-11.0.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:5bd2d3bdb846d757055910f0a59792d33b555800813c3b39ada1829c372ccb06"}, + {file = "pillow-11.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:375b8dd15a1f5d2feafff536d47e22f69625c1aa92f12b339ec0b2ca40263273"}, + {file = "pillow-11.0.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:daffdf51ee5db69a82dd127eabecce20729e21f7a3680cf7cbb23f0829189790"}, + {file = "pillow-11.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7326a1787e3c7b0429659e0a944725e1b03eeaa10edd945a86dead1913383944"}, + {file = "pillow-11.0.0.tar.gz", hash = "sha256:72bacbaf24ac003fea9bff9837d1eedb6088758d41e100c1552930151f677739"}, ] [[package]] @@ -1351,6 +1351,65 @@ files = [ {file = "portalocker-2.10.1.tar.gz", hash = "sha256:ef1bf844e878ab08aee7e40184156e1151f228f103aa5c6bd0724cc330960f8f"}, ] +[[package]] +name = "propcache" +version = "0.2.1" +requires_python = ">=3.9" +summary = "Accelerated property cache" +groups = ["default"] +files = [ + {file = "propcache-0.2.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:6b3f39a85d671436ee3d12c017f8fdea38509e4f25b28eb25877293c98c243f6"}, + {file = "propcache-0.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d51fbe4285d5db5d92a929e3e21536ea3dd43732c5b177c7ef03f918dff9f2"}, + {file = "propcache-0.2.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6445804cf4ec763dc70de65a3b0d9954e868609e83850a47ca4f0cb64bd79fea"}, + {file = "propcache-0.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9479aa06a793c5aeba49ce5c5692ffb51fcd9a7016e017d555d5e2b0045d212"}, + {file = "propcache-0.2.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d9631c5e8b5b3a0fda99cb0d29c18133bca1e18aea9effe55adb3da1adef80d3"}, + {file = "propcache-0.2.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3156628250f46a0895f1f36e1d4fbe062a1af8718ec3ebeb746f1d23f0c5dc4d"}, + {file = "propcache-0.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b6fb63ae352e13748289f04f37868099e69dba4c2b3e271c46061e82c745634"}, + {file = "propcache-0.2.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:887d9b0a65404929641a9fabb6452b07fe4572b269d901d622d8a34a4e9043b2"}, + {file = "propcache-0.2.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a96dc1fa45bd8c407a0af03b2d5218392729e1822b0c32e62c5bf7eeb5fb3958"}, + {file = "propcache-0.2.1-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:a7e65eb5c003a303b94aa2c3852ef130230ec79e349632d030e9571b87c4698c"}, + {file = "propcache-0.2.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:999779addc413181912e984b942fbcc951be1f5b3663cd80b2687758f434c583"}, + {file = "propcache-0.2.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:19a0f89a7bb9d8048d9c4370c9c543c396e894c76be5525f5e1ad287f1750ddf"}, + {file = "propcache-0.2.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:1ac2f5fe02fa75f56e1ad473f1175e11f475606ec9bd0be2e78e4734ad575034"}, + {file = "propcache-0.2.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:574faa3b79e8ebac7cb1d7930f51184ba1ccf69adfdec53a12f319a06030a68b"}, + {file = "propcache-0.2.1-cp310-cp310-win32.whl", hash 
= "sha256:03ff9d3f665769b2a85e6157ac8b439644f2d7fd17615a82fa55739bc97863f4"}, + {file = "propcache-0.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:2d3af2e79991102678f53e0dbf4c35de99b6b8b58f29a27ca0325816364caaba"}, + {file = "propcache-0.2.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1ffc3cca89bb438fb9c95c13fc874012f7b9466b89328c3c8b1aa93cdcfadd16"}, + {file = "propcache-0.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f174bbd484294ed9fdf09437f889f95807e5f229d5d93588d34e92106fbf6717"}, + {file = "propcache-0.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:70693319e0b8fd35dd863e3e29513875eb15c51945bf32519ef52927ca883bc3"}, + {file = "propcache-0.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b480c6a4e1138e1aa137c0079b9b6305ec6dcc1098a8ca5196283e8a49df95a9"}, + {file = "propcache-0.2.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d27b84d5880f6d8aa9ae3edb253c59d9f6642ffbb2c889b78b60361eed449787"}, + {file = "propcache-0.2.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:857112b22acd417c40fa4595db2fe28ab900c8c5fe4670c7989b1c0230955465"}, + {file = "propcache-0.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf6c4150f8c0e32d241436526f3c3f9cbd34429492abddbada2ffcff506c51af"}, + {file = "propcache-0.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:66d4cfda1d8ed687daa4bc0274fcfd5267873db9a5bc0418c2da19273040eeb7"}, + {file = "propcache-0.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c2f992c07c0fca81655066705beae35fc95a2fa7366467366db627d9f2ee097f"}, + {file = "propcache-0.2.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:4a571d97dbe66ef38e472703067021b1467025ec85707d57e78711c085984e54"}, + {file = "propcache-0.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:bb6178c241278d5fe853b3de743087be7f5f4c6f7d6d22a3b524d323eecec505"}, + {file = "propcache-0.2.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:ad1af54a62ffe39cf34db1aa6ed1a1873bd548f6401db39d8e7cd060b9211f82"}, + {file = "propcache-0.2.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e7048abd75fe40712005bcfc06bb44b9dfcd8e101dda2ecf2f5aa46115ad07ca"}, + {file = "propcache-0.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:160291c60081f23ee43d44b08a7e5fb76681221a8e10b3139618c5a9a291b84e"}, + {file = "propcache-0.2.1-cp311-cp311-win32.whl", hash = "sha256:819ce3b883b7576ca28da3861c7e1a88afd08cc8c96908e08a3f4dd64a228034"}, + {file = "propcache-0.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:edc9fc7051e3350643ad929df55c451899bb9ae6d24998a949d2e4c87fb596d3"}, + {file = "propcache-0.2.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:6a9a8c34fb7bb609419a211e59da8887eeca40d300b5ea8e56af98f6fbbb1541"}, + {file = "propcache-0.2.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ae1aa1cd222c6d205853b3013c69cd04515f9d6ab6de4b0603e2e1c33221303e"}, + {file = "propcache-0.2.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:accb6150ce61c9c4b7738d45550806aa2b71c7668c6942f17b0ac182b6142fd4"}, + {file = "propcache-0.2.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5eee736daafa7af6d0a2dc15cc75e05c64f37fc37bafef2e00d77c14171c2097"}, + {file = "propcache-0.2.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7a31fc1e1bd362874863fdeed71aed92d348f5336fd84f2197ba40c59f061bd"}, + {file = 
"propcache-0.2.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba4cfa1052819d16699e1d55d18c92b6e094d4517c41dd231a8b9f87b6fa681"}, + {file = "propcache-0.2.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f089118d584e859c62b3da0892b88a83d611c2033ac410e929cb6754eec0ed16"}, + {file = "propcache-0.2.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:781e65134efaf88feb447e8c97a51772aa75e48b794352f94cb7ea717dedda0d"}, + {file = "propcache-0.2.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:31f5af773530fd3c658b32b6bdc2d0838543de70eb9a2156c03e410f7b0d3aae"}, + {file = "propcache-0.2.1-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:a7a078f5d37bee6690959c813977da5291b24286e7b962e62a94cec31aa5188b"}, + {file = "propcache-0.2.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:cea7daf9fc7ae6687cf1e2c049752f19f146fdc37c2cc376e7d0032cf4f25347"}, + {file = "propcache-0.2.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:8b3489ff1ed1e8315674d0775dc7d2195fb13ca17b3808721b54dbe9fd020faf"}, + {file = "propcache-0.2.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:9403db39be1393618dd80c746cb22ccda168efce239c73af13c3763ef56ffc04"}, + {file = "propcache-0.2.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:5d97151bc92d2b2578ff7ce779cdb9174337390a535953cbb9452fb65164c587"}, + {file = "propcache-0.2.1-cp39-cp39-win32.whl", hash = "sha256:9caac6b54914bdf41bcc91e7eb9147d331d29235a7c967c150ef5df6464fd1bb"}, + {file = "propcache-0.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:92fc4500fcb33899b05ba73276dfb684a20d31caa567b7cb5252d48f896a91b1"}, + {file = "propcache-0.2.1-py3-none-any.whl", hash = "sha256:52277518d6aae65536e9cea52d4e7fd2f7a66f4aa2d30ed3f2fcea620ace3c54"}, + {file = "propcache-0.2.1.tar.gz", hash = "sha256:3f77ce728b19cb537714499928fe800c3dda29e8d9428778fc7c186da4c09a64"}, +] + [[package]] name = "pycparser" version = "2.22" @@ -1365,23 +1424,23 @@ files = [ [[package]] name = "pydantic" -version = "2.8.2" +version = "2.10.3" requires_python = ">=3.8" summary = "Data validation using Python type hints" groups = ["default"] dependencies = [ - "annotated-types>=0.4.0", - "pydantic-core==2.20.1", - "typing-extensions>=4.6.1; python_version < \"3.13\"", + "annotated-types>=0.6.0", + "pydantic-core==2.27.1", + "typing-extensions>=4.12.2", ] files = [ - {file = "pydantic-2.8.2-py3-none-any.whl", hash = "sha256:73ee9fddd406dc318b885c7a2eab8a6472b68b8fb5ba8150949fc3db939f23c8"}, - {file = "pydantic-2.8.2.tar.gz", hash = "sha256:6f62c13d067b0755ad1c21a34bdd06c0c12625a22b0fc09c6b149816604f7c2a"}, + {file = "pydantic-2.10.3-py3-none-any.whl", hash = "sha256:be04d85bbc7b65651c5f8e6b9976ed9c6f41782a55524cef079a34a0bb82144d"}, + {file = "pydantic-2.10.3.tar.gz", hash = "sha256:cb5ac360ce894ceacd69c403187900a02c4b20b693a9dd1d643e1effab9eadf9"}, ] [[package]] name = "pydantic-core" -version = "2.20.1" +version = "2.27.1" requires_python = ">=3.8" summary = "Core functionality for Pydantic validation and serialization" groups = ["default"] @@ -1389,86 +1448,92 @@ dependencies = [ "typing-extensions!=4.7.0,>=4.6.0", ] files = [ - {file = "pydantic_core-2.20.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3acae97ffd19bf091c72df4d726d552c473f3576409b2a7ca36b2f535ffff4a3"}, - {file = "pydantic_core-2.20.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:41f4c96227a67a013e7de5ff8f20fb496ce573893b7f4f2707d065907bffdbd6"}, - {file = 
"pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f239eb799a2081495ea659d8d4a43a8f42cd1fe9ff2e7e436295c38a10c286a"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53e431da3fc53360db73eedf6f7124d1076e1b4ee4276b36fb25514544ceb4a3"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1f62b2413c3a0e846c3b838b2ecd6c7a19ec6793b2a522745b0869e37ab5bc1"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d41e6daee2813ecceea8eda38062d69e280b39df793f5a942fa515b8ed67953"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d482efec8b7dc6bfaedc0f166b2ce349df0011f5d2f1f25537ced4cfc34fd98"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e93e1a4b4b33daed65d781a57a522ff153dcf748dee70b40c7258c5861e1768a"}, - {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e7c4ea22b6739b162c9ecaaa41d718dfad48a244909fe7ef4b54c0b530effc5a"}, - {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4f2790949cf385d985a31984907fecb3896999329103df4e4983a4a41e13e840"}, - {file = "pydantic_core-2.20.1-cp310-none-win32.whl", hash = "sha256:5e999ba8dd90e93d57410c5e67ebb67ffcaadcea0ad973240fdfd3a135506250"}, - {file = "pydantic_core-2.20.1-cp310-none-win_amd64.whl", hash = "sha256:512ecfbefef6dac7bc5eaaf46177b2de58cdf7acac8793fe033b24ece0b9566c"}, - {file = "pydantic_core-2.20.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d2a8fa9d6d6f891f3deec72f5cc668e6f66b188ab14bb1ab52422fe8e644f312"}, - {file = "pydantic_core-2.20.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:175873691124f3d0da55aeea1d90660a6ea7a3cfea137c38afa0a5ffabe37b88"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37eee5b638f0e0dcd18d21f59b679686bbd18917b87db0193ae36f9c23c355fc"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:25e9185e2d06c16ee438ed39bf62935ec436474a6ac4f9358524220f1b236e43"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:150906b40ff188a3260cbee25380e7494ee85048584998c1e66df0c7a11c17a6"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ad4aeb3e9a97286573c03df758fc7627aecdd02f1da04516a86dc159bf70121"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3f3ed29cd9f978c604708511a1f9c2fdcb6c38b9aae36a51905b8811ee5cbf1"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b0dae11d8f5ded51699c74d9548dcc5938e0804cc8298ec0aa0da95c21fff57b"}, - {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:faa6b09ee09433b87992fb5a2859efd1c264ddc37280d2dd5db502126d0e7f27"}, - {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9dc1b507c12eb0481d071f3c1808f0529ad41dc415d0ca11f7ebfc666e66a18b"}, - {file = "pydantic_core-2.20.1-cp311-none-win32.whl", hash = "sha256:fa2fddcb7107e0d1808086ca306dcade7df60a13a6c347a7acf1ec139aa6789a"}, - {file = "pydantic_core-2.20.1-cp311-none-win_amd64.whl", hash = 
"sha256:40a783fb7ee353c50bd3853e626f15677ea527ae556429453685ae32280c19c2"}, - {file = "pydantic_core-2.20.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b03f7941783b4c4a26051846dea594628b38f6940a2fdc0df00b221aed39314c"}, - {file = "pydantic_core-2.20.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1eedfeb6089ed3fad42e81a67755846ad4dcc14d73698c120a82e4ccf0f1f9f6"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:635fee4e041ab9c479e31edda27fcf966ea9614fff1317e280d99eb3e5ab6fe2"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:77bf3ac639c1ff567ae3b47f8d4cc3dc20f9966a2a6dd2311dcc055d3d04fb8a"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ed1b0132f24beeec5a78b67d9388656d03e6a7c837394f99257e2d55b461611"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c6514f963b023aeee506678a1cf821fe31159b925c4b76fe2afa94cc70b3222b"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10d4204d8ca33146e761c79f83cc861df20e7ae9f6487ca290a97702daf56006"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2d036c7187b9422ae5b262badb87a20a49eb6c5238b2004e96d4da1231badef1"}, - {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9ebfef07dbe1d93efb94b4700f2d278494e9162565a54f124c404a5656d7ff09"}, - {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6b9d9bb600328a1ce523ab4f454859e9d439150abb0906c5a1983c146580ebab"}, - {file = "pydantic_core-2.20.1-cp39-none-win32.whl", hash = "sha256:784c1214cb6dd1e3b15dd8b91b9a53852aed16671cc3fbe4786f4f1db07089e2"}, - {file = "pydantic_core-2.20.1-cp39-none-win_amd64.whl", hash = "sha256:d2fe69c5434391727efa54b47a1e7986bb0186e72a41b203df8f5b0a19a4f669"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a45f84b09ac9c3d35dfcf6a27fd0634d30d183205230a0ebe8373a0e8cfa0906"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d02a72df14dfdbaf228424573a07af10637bd490f0901cee872c4f434a735b94"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2b27e6af28f07e2f195552b37d7d66b150adbaa39a6d327766ffd695799780f"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:084659fac3c83fd674596612aeff6041a18402f1e1bc19ca39e417d554468482"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:242b8feb3c493ab78be289c034a1f659e8826e2233786e36f2893a950a719bb6"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:38cf1c40a921d05c5edc61a785c0ddb4bed67827069f535d794ce6bcded919fc"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e0bbdd76ce9aa5d4209d65f2b27fc6e5ef1312ae6c5333c26db3f5ade53a1e99"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:254ec27fdb5b1ee60684f91683be95e5133c994cc54e86a0b0963afa25c8f8a6"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:407653af5617f0757261ae249d3fba09504d7a71ab36ac057c938572d1bc9331"}, - {file = 
"pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:c693e916709c2465b02ca0ad7b387c4f8423d1db7b4649c551f27a529181c5ad"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b5ff4911aea936a47d9376fd3ab17e970cc543d1b68921886e7f64bd28308d1"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:177f55a886d74f1808763976ac4efd29b7ed15c69f4d838bbd74d9d09cf6fa86"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:964faa8a861d2664f0c7ab0c181af0bea66098b1919439815ca8803ef136fc4e"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4dd484681c15e6b9a977c785a345d3e378d72678fd5f1f3c0509608da24f2ac0"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f6d6cff3538391e8486a431569b77921adfcdef14eb18fbf19b7c0a5294d4e6a"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a6d511cc297ff0883bc3708b465ff82d7560193169a8b93260f74ecb0a5e08a7"}, - {file = "pydantic_core-2.20.1.tar.gz", hash = "sha256:26ca695eeee5f9f1aeeb211ffc12f10bcb6f71e2989988fda61dabd65db878d4"}, + {file = "pydantic_core-2.27.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:71a5e35c75c021aaf400ac048dacc855f000bdfed91614b4a726f7432f1f3d6a"}, + {file = "pydantic_core-2.27.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f82d068a2d6ecfc6e054726080af69a6764a10015467d7d7b9f66d6ed5afa23b"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:121ceb0e822f79163dd4699e4c54f5ad38b157084d97b34de8b232bcaad70278"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4603137322c18eaf2e06a4495f426aa8d8388940f3c457e7548145011bb68e05"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a33cd6ad9017bbeaa9ed78a2e0752c5e250eafb9534f308e7a5f7849b0b1bfb4"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15cc53a3179ba0fcefe1e3ae50beb2784dede4003ad2dfd24f81bba4b23a454f"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45d9c5eb9273aa50999ad6adc6be5e0ecea7e09dbd0d31bd0c65a55a2592ca08"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8bf7b66ce12a2ac52d16f776b31d16d91033150266eb796967a7e4621707e4f6"}, + {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:655d7dd86f26cb15ce8a431036f66ce0318648f8853d709b4167786ec2fa4807"}, + {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:5556470f1a2157031e676f776c2bc20acd34c1990ca5f7e56f1ebf938b9ab57c"}, + {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f69ed81ab24d5a3bd93861c8c4436f54afdf8e8cc421562b0c7504cf3be58206"}, + {file = "pydantic_core-2.27.1-cp310-none-win32.whl", hash = "sha256:f5a823165e6d04ccea61a9f0576f345f8ce40ed533013580e087bd4d7442b52c"}, + {file = "pydantic_core-2.27.1-cp310-none-win_amd64.whl", hash = "sha256:57866a76e0b3823e0b56692d1a0bf722bffb324839bb5b7226a7dbd6c9a40b17"}, + {file = "pydantic_core-2.27.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:ac3b20653bdbe160febbea8aa6c079d3df19310d50ac314911ed8cc4eb7f8cb8"}, + {file = 
"pydantic_core-2.27.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a5a8e19d7c707c4cadb8c18f5f60c843052ae83c20fa7d44f41594c644a1d330"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f7059ca8d64fea7f238994c97d91f75965216bcbe5f695bb44f354893f11d52"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bed0f8a0eeea9fb72937ba118f9db0cb7e90773462af7962d382445f3005e5a4"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a3cb37038123447cf0f3ea4c74751f6a9d7afef0eb71aa07bf5f652b5e6a132c"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:84286494f6c5d05243456e04223d5a9417d7f443c3b76065e75001beb26f88de"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acc07b2cfc5b835444b44a9956846b578d27beeacd4b52e45489e93276241025"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4fefee876e07a6e9aad7a8c8c9f85b0cdbe7df52b8a9552307b09050f7512c7e"}, + {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:258c57abf1188926c774a4c94dd29237e77eda19462e5bb901d88adcab6af919"}, + {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:35c14ac45fcfdf7167ca76cc80b2001205a8d5d16d80524e13508371fb8cdd9c"}, + {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d1b26e1dff225c31897696cab7d4f0a315d4c0d9e8666dbffdb28216f3b17fdc"}, + {file = "pydantic_core-2.27.1-cp311-none-win32.whl", hash = "sha256:2cdf7d86886bc6982354862204ae3b2f7f96f21a3eb0ba5ca0ac42c7b38598b9"}, + {file = "pydantic_core-2.27.1-cp311-none-win_amd64.whl", hash = "sha256:3af385b0cee8df3746c3f406f38bcbfdc9041b5c2d5ce3e5fc6637256e60bbc5"}, + {file = "pydantic_core-2.27.1-cp311-none-win_arm64.whl", hash = "sha256:81f2ec23ddc1b476ff96563f2e8d723830b06dceae348ce02914a37cb4e74b89"}, + {file = "pydantic_core-2.27.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:e9386266798d64eeb19dd3677051f5705bf873e98e15897ddb7d76f477131967"}, + {file = "pydantic_core-2.27.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4228b5b646caa73f119b1ae756216b59cc6e2267201c27d3912b592c5e323b60"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b3dfe500de26c52abe0477dde16192ac39c98f05bf2d80e76102d394bd13854"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:aee66be87825cdf72ac64cb03ad4c15ffef4143dbf5c113f64a5ff4f81477bf9"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b748c44bb9f53031c8cbc99a8a061bc181c1000c60a30f55393b6e9c45cc5bd"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ca038c7f6a0afd0b2448941b6ef9d5e1949e999f9e5517692eb6da58e9d44be"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e0bd57539da59a3e4671b90a502da9a28c72322a4f17866ba3ac63a82c4498e"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ac6c2c45c847bbf8f91930d88716a0fb924b51e0c6dad329b793d670ec5db792"}, + {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:b94d4ba43739bbe8b0ce4262bcc3b7b9f31459ad120fb595627eaeb7f9b9ca01"}, + {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:00e6424f4b26fe82d44577b4c842d7df97c20be6439e8e685d0d715feceb9fb9"}, + {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:38de0a70160dd97540335b7ad3a74571b24f1dc3ed33f815f0880682e6880131"}, + {file = "pydantic_core-2.27.1-cp39-none-win32.whl", hash = "sha256:7ccebf51efc61634f6c2344da73e366c75e735960b5654b63d7e6f69a5885fa3"}, + {file = "pydantic_core-2.27.1-cp39-none-win_amd64.whl", hash = "sha256:a57847b090d7892f123726202b7daa20df6694cbd583b67a592e856bff603d6c"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3fa80ac2bd5856580e242dbc202db873c60a01b20309c8319b5c5986fbe53ce6"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d950caa237bb1954f1b8c9227b5065ba6875ac9771bb8ec790d956a699b78676"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e4216e64d203e39c62df627aa882f02a2438d18a5f21d7f721621f7a5d3611d"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02a3d637bd387c41d46b002f0e49c52642281edacd2740e5a42f7017feea3f2c"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:161c27ccce13b6b0c8689418da3885d3220ed2eae2ea5e9b2f7f3d48f1d52c27"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:19910754e4cc9c63bc1c7f6d73aa1cfee82f42007e407c0f413695c2f7ed777f"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:e173486019cc283dc9778315fa29a363579372fe67045e971e89b6365cc035ed"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:af52d26579b308921b73b956153066481f064875140ccd1dfd4e77db89dbb12f"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:981fb88516bd1ae8b0cbbd2034678a39dedc98752f264ac9bc5839d3923fa04c"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5fde892e6c697ce3e30c61b239330fc5d569a71fefd4eb6512fc6caec9dd9e2f"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:816f5aa087094099fff7edabb5e01cc370eb21aa1a1d44fe2d2aefdfb5599b31"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c10c309e18e443ddb108f0ef64e8729363adbfd92d6d57beec680f6261556f3"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98476c98b02c8e9b2eec76ac4156fd006628b1b2d0ef27e548ffa978393fd154"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c3027001c28434e7ca5a6e1e527487051136aa81803ac812be51802150d880dd"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:7699b1df36a48169cdebda7ab5a2bac265204003f153b4bd17276153d997670a"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1c39b07d90be6b48968ddc8c19e7585052088fd7ec8d568bb31ff64c70ae3c97"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:46ccfe3032b3915586e469d4972973f893c0a2bb65669194a5bdea9bacc088c2"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-win_amd64.whl", hash = 
"sha256:62ba45e21cf6571d7f716d903b5b7b6d2617e2d5d67c0923dc47b9d41369f840"}, + {file = "pydantic_core-2.27.1.tar.gz", hash = "sha256:62a763352879b84aa31058fc931884055fd75089cccbd9d58bb6afd01141b235"}, ] [[package]] name = "pyjwt" -version = "2.9.0" -requires_python = ">=3.8" +version = "2.10.1" +requires_python = ">=3.9" summary = "JSON Web Token implementation in Python" groups = ["default"] files = [ - {file = "PyJWT-2.9.0-py3-none-any.whl", hash = "sha256:3b02fb0f44517787776cf48f2ae25d8e14f300e6d7545a4315cee571a415e850"}, - {file = "pyjwt-2.9.0.tar.gz", hash = "sha256:7e1e5b56cc735432a7369cbfa0efe50fa113ebecdc04ae6922deba8b84582d0c"}, + {file = "PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb"}, + {file = "pyjwt-2.10.1.tar.gz", hash = "sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953"}, ] [[package]] name = "pyjwt" -version = "2.9.0" +version = "2.10.1" extras = ["crypto"] -requires_python = ">=3.8" +requires_python = ">=3.9" summary = "JSON Web Token implementation in Python" groups = ["default"] dependencies = [ - "PyJWT==2.9.0", + "PyJWT==2.10.1", "cryptography>=3.4.0", ] files = [ - {file = "PyJWT-2.9.0-py3-none-any.whl", hash = "sha256:3b02fb0f44517787776cf48f2ae25d8e14f300e6d7545a4315cee571a415e850"}, - {file = "pyjwt-2.9.0.tar.gz", hash = "sha256:7e1e5b56cc735432a7369cbfa0efe50fa113ebecdc04ae6922deba8b84582d0c"}, + {file = "PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb"}, + {file = "pyjwt-2.10.1.tar.gz", hash = "sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953"}, ] [[package]] @@ -1501,28 +1566,29 @@ files = [ [[package]] name = "pytz" -version = "2024.1" +version = "2024.2" summary = "World timezone definitions, modern and historical" groups = ["default"] files = [ - {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, - {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, + {file = "pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725"}, + {file = "pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a"}, ] [[package]] name = "pywin32" -version = "306" +version = "308" summary = "Python for Window Extensions" groups = ["default"] marker = "platform_system == \"Windows\"" files = [ - {file = "pywin32-306-cp310-cp310-win32.whl", hash = "sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d"}, - {file = "pywin32-306-cp310-cp310-win_amd64.whl", hash = "sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8"}, - {file = "pywin32-306-cp311-cp311-win32.whl", hash = "sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407"}, - {file = "pywin32-306-cp311-cp311-win_amd64.whl", hash = "sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e"}, - {file = "pywin32-306-cp311-cp311-win_arm64.whl", hash = "sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a"}, - {file = "pywin32-306-cp39-cp39-win32.whl", hash = "sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802"}, - {file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"}, + {file = "pywin32-308-cp310-cp310-win32.whl", hash = 
"sha256:796ff4426437896550d2981b9c2ac0ffd75238ad9ea2d3bfa67a1abd546d262e"}, + {file = "pywin32-308-cp310-cp310-win_amd64.whl", hash = "sha256:4fc888c59b3c0bef905ce7eb7e2106a07712015ea1c8234b703a088d46110e8e"}, + {file = "pywin32-308-cp310-cp310-win_arm64.whl", hash = "sha256:a5ab5381813b40f264fa3495b98af850098f814a25a63589a8e9eb12560f450c"}, + {file = "pywin32-308-cp311-cp311-win32.whl", hash = "sha256:5d8c8015b24a7d6855b1550d8e660d8daa09983c80e5daf89a273e5c6fb5095a"}, + {file = "pywin32-308-cp311-cp311-win_amd64.whl", hash = "sha256:575621b90f0dc2695fec346b2d6302faebd4f0f45c05ea29404cefe35d89442b"}, + {file = "pywin32-308-cp311-cp311-win_arm64.whl", hash = "sha256:100a5442b7332070983c4cd03f2e906a5648a5104b8a7f50175f7906efd16bb6"}, + {file = "pywin32-308-cp39-cp39-win32.whl", hash = "sha256:7873ca4dc60ab3287919881a7d4f88baee4a6e639aa6962de25a98ba6b193341"}, + {file = "pywin32-308-cp39-cp39-win_amd64.whl", hash = "sha256:71b3322d949b4cc20776436a9c9ba0eeedcbc9c650daa536df63f0ff111bb920"}, ] [[package]] @@ -1564,73 +1630,73 @@ files = [ [[package]] name = "redis" -version = "5.0.8" -requires_python = ">=3.7" +version = "5.2.1" +requires_python = ">=3.8" summary = "Python client for Redis database and key-value store" groups = ["default"] dependencies = [ "async-timeout>=4.0.3; python_full_version < \"3.11.3\"", ] files = [ - {file = "redis-5.0.8-py3-none-any.whl", hash = "sha256:56134ee08ea909106090934adc36f65c9bcbbaecea5b21ba704ba6fb561f8eb4"}, - {file = "redis-5.0.8.tar.gz", hash = "sha256:0c5b10d387568dfe0698c6fad6615750c24170e548ca2deac10c649d463e9870"}, + {file = "redis-5.2.1-py3-none-any.whl", hash = "sha256:ee7e1056b9aea0f04c6c2ed59452947f34c4940ee025f5dd83e6a6418b6989e4"}, + {file = "redis-5.2.1.tar.gz", hash = "sha256:16f2e22dff21d5125e8481515e386711a34cbec50f0e44413dd7d9c060a54e0f"}, ] [[package]] name = "regex" -version = "2024.7.24" +version = "2024.11.6" requires_python = ">=3.8" summary = "Alternative regular expression module, to replace re." 
groups = ["default"] files = [ - {file = "regex-2024.7.24-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b0d3f567fafa0633aee87f08b9276c7062da9616931382993c03808bb68ce"}, - {file = "regex-2024.7.24-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3426de3b91d1bc73249042742f45c2148803c111d1175b283270177fdf669024"}, - {file = "regex-2024.7.24-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f273674b445bcb6e4409bf8d1be67bc4b58e8b46fd0d560055d515b8830063cd"}, - {file = "regex-2024.7.24-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23acc72f0f4e1a9e6e9843d6328177ae3074b4182167e34119ec7233dfeccf53"}, - {file = "regex-2024.7.24-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65fd3d2e228cae024c411c5ccdffae4c315271eee4a8b839291f84f796b34eca"}, - {file = "regex-2024.7.24-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c414cbda77dbf13c3bc88b073a1a9f375c7b0cb5e115e15d4b73ec3a2fbc6f59"}, - {file = "regex-2024.7.24-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf7a89eef64b5455835f5ed30254ec19bf41f7541cd94f266ab7cbd463f00c41"}, - {file = "regex-2024.7.24-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19c65b00d42804e3fbea9708f0937d157e53429a39b7c61253ff15670ff62cb5"}, - {file = "regex-2024.7.24-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7a5486ca56c8869070a966321d5ab416ff0f83f30e0e2da1ab48815c8d165d46"}, - {file = "regex-2024.7.24-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:6f51f9556785e5a203713f5efd9c085b4a45aecd2a42573e2b5041881b588d1f"}, - {file = "regex-2024.7.24-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:a4997716674d36a82eab3e86f8fa77080a5d8d96a389a61ea1d0e3a94a582cf7"}, - {file = "regex-2024.7.24-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:c0abb5e4e8ce71a61d9446040c1e86d4e6d23f9097275c5bd49ed978755ff0fe"}, - {file = "regex-2024.7.24-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:18300a1d78cf1290fa583cd8b7cde26ecb73e9f5916690cf9d42de569c89b1ce"}, - {file = "regex-2024.7.24-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:416c0e4f56308f34cdb18c3f59849479dde5b19febdcd6e6fa4d04b6c31c9faa"}, - {file = "regex-2024.7.24-cp310-cp310-win32.whl", hash = "sha256:fb168b5924bef397b5ba13aabd8cf5df7d3d93f10218d7b925e360d436863f66"}, - {file = "regex-2024.7.24-cp310-cp310-win_amd64.whl", hash = "sha256:6b9fc7e9cc983e75e2518496ba1afc524227c163e43d706688a6bb9eca41617e"}, - {file = "regex-2024.7.24-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:382281306e3adaaa7b8b9ebbb3ffb43358a7bbf585fa93821300a418bb975281"}, - {file = "regex-2024.7.24-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4fdd1384619f406ad9037fe6b6eaa3de2749e2e12084abc80169e8e075377d3b"}, - {file = "regex-2024.7.24-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3d974d24edb231446f708c455fd08f94c41c1ff4f04bcf06e5f36df5ef50b95a"}, - {file = "regex-2024.7.24-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a2ec4419a3fe6cf8a4795752596dfe0adb4aea40d3683a132bae9c30b81e8d73"}, - {file = "regex-2024.7.24-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb563dd3aea54c797adf513eeec819c4213d7dbfc311874eb4fd28d10f2ff0f2"}, - {file = "regex-2024.7.24-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:45104baae8b9f67569f0f1dca5e1f1ed77a54ae1cd8b0b07aba89272710db61e"}, - 
{file = "regex-2024.7.24-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:994448ee01864501912abf2bad9203bffc34158e80fe8bfb5b031f4f8e16da51"}, - {file = "regex-2024.7.24-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3fac296f99283ac232d8125be932c5cd7644084a30748fda013028c815ba3364"}, - {file = "regex-2024.7.24-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7e37e809b9303ec3a179085415cb5f418ecf65ec98cdfe34f6a078b46ef823ee"}, - {file = "regex-2024.7.24-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:01b689e887f612610c869421241e075c02f2e3d1ae93a037cb14f88ab6a8934c"}, - {file = "regex-2024.7.24-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f6442f0f0ff81775eaa5b05af8a0ffa1dda36e9cf6ec1e0d3d245e8564b684ce"}, - {file = "regex-2024.7.24-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:871e3ab2838fbcb4e0865a6e01233975df3a15e6fce93b6f99d75cacbd9862d1"}, - {file = "regex-2024.7.24-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c918b7a1e26b4ab40409820ddccc5d49871a82329640f5005f73572d5eaa9b5e"}, - {file = "regex-2024.7.24-cp311-cp311-win32.whl", hash = "sha256:2dfbb8baf8ba2c2b9aa2807f44ed272f0913eeeba002478c4577b8d29cde215c"}, - {file = "regex-2024.7.24-cp311-cp311-win_amd64.whl", hash = "sha256:538d30cd96ed7d1416d3956f94d54e426a8daf7c14527f6e0d6d425fcb4cca52"}, - {file = "regex-2024.7.24-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:0ffe3f9d430cd37d8fa5632ff6fb36d5b24818c5c986893063b4e5bdb84cdf24"}, - {file = "regex-2024.7.24-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:25419b70ba00a16abc90ee5fce061228206173231f004437730b67ac77323f0d"}, - {file = "regex-2024.7.24-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:33e2614a7ce627f0cdf2ad104797d1f68342d967de3695678c0cb84f530709f8"}, - {file = "regex-2024.7.24-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d33a0021893ede5969876052796165bab6006559ab845fd7b515a30abdd990dc"}, - {file = "regex-2024.7.24-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:04ce29e2c5fedf296b1a1b0acc1724ba93a36fb14031f3abfb7abda2806c1535"}, - {file = "regex-2024.7.24-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b16582783f44fbca6fcf46f61347340c787d7530d88b4d590a397a47583f31dd"}, - {file = "regex-2024.7.24-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:836d3cc225b3e8a943d0b02633fb2f28a66e281290302a79df0e1eaa984ff7c1"}, - {file = "regex-2024.7.24-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:438d9f0f4bc64e8dea78274caa5af971ceff0f8771e1a2333620969936ba10be"}, - {file = "regex-2024.7.24-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:973335b1624859cb0e52f96062a28aa18f3a5fc77a96e4a3d6d76e29811a0e6e"}, - {file = "regex-2024.7.24-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c5e69fd3eb0b409432b537fe3c6f44ac089c458ab6b78dcec14478422879ec5f"}, - {file = "regex-2024.7.24-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:fbf8c2f00904eaf63ff37718eb13acf8e178cb940520e47b2f05027f5bb34ce3"}, - {file = "regex-2024.7.24-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ae2757ace61bc4061b69af19e4689fa4416e1a04840f33b441034202b5cd02d4"}, - {file = "regex-2024.7.24-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:44fc61b99035fd9b3b9453f1713234e5a7c92a04f3577252b45feefe1b327759"}, - {file = 
"regex-2024.7.24-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:84c312cdf839e8b579f504afcd7b65f35d60b6285d892b19adea16355e8343c9"}, - {file = "regex-2024.7.24-cp39-cp39-win32.whl", hash = "sha256:ca5b2028c2f7af4e13fb9fc29b28d0ce767c38c7facdf64f6c2cd040413055f1"}, - {file = "regex-2024.7.24-cp39-cp39-win_amd64.whl", hash = "sha256:7c479f5ae937ec9985ecaf42e2e10631551d909f203e31308c12d703922742f9"}, - {file = "regex-2024.7.24.tar.gz", hash = "sha256:9cfd009eed1a46b27c14039ad5bbc5e71b6367c5b2e6d5f5da0ea91600817506"}, + {file = "regex-2024.11.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ff590880083d60acc0433f9c3f713c51f7ac6ebb9adf889c79a261ecf541aa91"}, + {file = "regex-2024.11.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:658f90550f38270639e83ce492f27d2c8d2cd63805c65a13a14d36ca126753f0"}, + {file = "regex-2024.11.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:164d8b7b3b4bcb2068b97428060b2a53be050085ef94eca7f240e7947f1b080e"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3660c82f209655a06b587d55e723f0b813d3a7db2e32e5e7dc64ac2a9e86fde"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d22326fcdef5e08c154280b71163ced384b428343ae16a5ab2b3354aed12436e"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f1ac758ef6aebfc8943560194e9fd0fa18bcb34d89fd8bd2af18183afd8da3a2"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:997d6a487ff00807ba810e0f8332c18b4eb8d29463cfb7c820dc4b6e7562d0cf"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:02a02d2bb04fec86ad61f3ea7f49c015a0681bf76abb9857f945d26159d2968c"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f02f93b92358ee3f78660e43b4b0091229260c5d5c408d17d60bf26b6c900e86"}, + {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:06eb1be98df10e81ebaded73fcd51989dcf534e3c753466e4b60c4697a003b67"}, + {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:040df6fe1a5504eb0f04f048e6d09cd7c7110fef851d7c567a6b6e09942feb7d"}, + {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:fdabbfc59f2c6edba2a6622c647b716e34e8e3867e0ab975412c5c2f79b82da2"}, + {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:8447d2d39b5abe381419319f942de20b7ecd60ce86f16a23b0698f22e1b70008"}, + {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:da8f5fc57d1933de22a9e23eec290a0d8a5927a5370d24bda9a6abe50683fe62"}, + {file = "regex-2024.11.6-cp310-cp310-win32.whl", hash = "sha256:b489578720afb782f6ccf2840920f3a32e31ba28a4b162e13900c3e6bd3f930e"}, + {file = "regex-2024.11.6-cp310-cp310-win_amd64.whl", hash = "sha256:5071b2093e793357c9d8b2929dfc13ac5f0a6c650559503bb81189d0a3814519"}, + {file = "regex-2024.11.6-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5478c6962ad548b54a591778e93cd7c456a7a29f8eca9c49e4f9a806dcc5d638"}, + {file = "regex-2024.11.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2c89a8cc122b25ce6945f0423dc1352cb9593c68abd19223eebbd4e56612c5b7"}, + {file = "regex-2024.11.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:94d87b689cdd831934fa3ce16cc15cd65748e6d689f5d2b8f4f4df2065c9fa20"}, + {file = 
"regex-2024.11.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1062b39a0a2b75a9c694f7a08e7183a80c63c0d62b301418ffd9c35f55aaa114"}, + {file = "regex-2024.11.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:167ed4852351d8a750da48712c3930b031f6efdaa0f22fa1933716bfcd6bf4a3"}, + {file = "regex-2024.11.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d548dafee61f06ebdb584080621f3e0c23fff312f0de1afc776e2a2ba99a74f"}, + {file = "regex-2024.11.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2a19f302cd1ce5dd01a9099aaa19cae6173306d1302a43b627f62e21cf18ac0"}, + {file = "regex-2024.11.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bec9931dfb61ddd8ef2ebc05646293812cb6b16b60cf7c9511a832b6f1854b55"}, + {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9714398225f299aa85267fd222f7142fcb5c769e73d7733344efc46f2ef5cf89"}, + {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:202eb32e89f60fc147a41e55cb086db2a3f8cb82f9a9a88440dcfc5d37faae8d"}, + {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:4181b814e56078e9b00427ca358ec44333765f5ca1b45597ec7446d3a1ef6e34"}, + {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:068376da5a7e4da51968ce4c122a7cd31afaaec4fccc7856c92f63876e57b51d"}, + {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ac10f2c4184420d881a3475fb2c6f4d95d53a8d50209a2500723d831036f7c45"}, + {file = "regex-2024.11.6-cp311-cp311-win32.whl", hash = "sha256:c36f9b6f5f8649bb251a5f3f66564438977b7ef8386a52460ae77e6070d309d9"}, + {file = "regex-2024.11.6-cp311-cp311-win_amd64.whl", hash = "sha256:02e28184be537f0e75c1f9b2f8847dc51e08e6e171c6bde130b2687e0c33cf60"}, + {file = "regex-2024.11.6-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5704e174f8ccab2026bd2f1ab6c510345ae8eac818b613d7d73e785f1310f839"}, + {file = "regex-2024.11.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:220902c3c5cc6af55d4fe19ead504de80eb91f786dc102fbd74894b1551f095e"}, + {file = "regex-2024.11.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5e7e351589da0850c125f1600a4c4ba3c722efefe16b297de54300f08d734fbf"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5056b185ca113c88e18223183aa1a50e66507769c9640a6ff75859619d73957b"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e34b51b650b23ed3354b5a07aab37034d9f923db2a40519139af34f485f77d0"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5670bce7b200273eee1840ef307bfa07cda90b38ae56e9a6ebcc9f50da9c469b"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:08986dce1339bc932923e7d1232ce9881499a0e02925f7402fb7c982515419ef"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93c0b12d3d3bc25af4ebbf38f9ee780a487e8bf6954c115b9f015822d3bb8e48"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:764e71f22ab3b305e7f4c21f1a97e1526a25ebdd22513e251cf376760213da13"}, + {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_aarch64.whl", hash = 
"sha256:f056bf21105c2515c32372bbc057f43eb02aae2fda61052e2f7622c801f0b4e2"}, + {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:69ab78f848845569401469da20df3e081e6b5a11cb086de3eed1d48f5ed57c95"}, + {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:86fddba590aad9208e2fa8b43b4c098bb0ec74f15718bb6a704e3c63e2cef3e9"}, + {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:684d7a212682996d21ca12ef3c17353c021fe9de6049e19ac8481ec35574a70f"}, + {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a03e02f48cd1abbd9f3b7e3586d97c8f7a9721c436f51a5245b3b9483044480b"}, + {file = "regex-2024.11.6-cp39-cp39-win32.whl", hash = "sha256:41758407fc32d5c3c5de163888068cfee69cb4c2be844e7ac517a52770f9af57"}, + {file = "regex-2024.11.6-cp39-cp39-win_amd64.whl", hash = "sha256:b2837718570f95dd41675328e111345f9b7095d821bac435aac173ac80b19983"}, + {file = "regex-2024.11.6.tar.gz", hash = "sha256:7ab159b063c52a0333c884e4679f8d7a85112ee3078fe3d9004b2dd875585519"}, ] [[package]] @@ -1652,7 +1718,7 @@ files = [ [[package]] name = "s3transfer" -version = "0.10.2" +version = "0.10.4" requires_python = ">=3.8" summary = "An Amazon S3 Transfer Manager" groups = ["default"] @@ -1660,19 +1726,19 @@ dependencies = [ "botocore<2.0a.0,>=1.33.2", ] files = [ - {file = "s3transfer-0.10.2-py3-none-any.whl", hash = "sha256:eca1c20de70a39daee580aef4986996620f365c4e0fda6a86100231d62f1bf69"}, - {file = "s3transfer-0.10.2.tar.gz", hash = "sha256:0711534e9356d3cc692fdde846b4a1e4b0cb6519971860796e6bc4c7aea00ef6"}, + {file = "s3transfer-0.10.4-py3-none-any.whl", hash = "sha256:244a76a24355363a68164241438de1b72f8781664920260c48465896b712a41e"}, + {file = "s3transfer-0.10.4.tar.gz", hash = "sha256:29edc09801743c21eb5ecbc617a152df41d3c287f67b615f73e5f750583666a7"}, ] [[package]] name = "six" -version = "1.16.0" -requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +version = "1.17.0" +requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" summary = "Python 2 and 3 compatibility utilities" groups = ["default"] files = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, + {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, + {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, ] [[package]] @@ -1688,18 +1754,18 @@ files = [ [[package]] name = "soupsieve" -version = "2.5" +version = "2.6" requires_python = ">=3.8" summary = "A modern CSS selector implementation for Beautiful Soup." 
groups = ["default"] files = [ - {file = "soupsieve-2.5-py3-none-any.whl", hash = "sha256:eaa337ff55a1579b6549dc679565eac1e3d000563bcb1c8ab0d0fefbc0c2cdc7"}, - {file = "soupsieve-2.5.tar.gz", hash = "sha256:5663d5a7b3bfaeee0bc4372e7fc48f9cff4940b3eec54a6451cc5299f1097690"}, + {file = "soupsieve-2.6-py3-none-any.whl", hash = "sha256:e72c4ff06e4fb6e4b5a9f0f55fe6e81514581fca1515028625d0f299c602ccc9"}, + {file = "soupsieve-2.6.tar.gz", hash = "sha256:e2e68417777af359ec65daac1057404a3c8a5455bb8abc36f1a9866ab1a51abb"}, ] [[package]] name = "sqlalchemy" -version = "2.0.32" +version = "2.0.36" requires_python = ">=3.7" summary = "Database Abstraction Library" groups = ["default"] @@ -1708,72 +1774,72 @@ dependencies = [ "typing-extensions>=4.6.0", ] files = [ - {file = "SQLAlchemy-2.0.32-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0c9045ecc2e4db59bfc97b20516dfdf8e41d910ac6fb667ebd3a79ea54084619"}, - {file = "SQLAlchemy-2.0.32-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1467940318e4a860afd546ef61fefb98a14d935cd6817ed07a228c7f7c62f389"}, - {file = "SQLAlchemy-2.0.32-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5954463675cb15db8d4b521f3566a017c8789222b8316b1e6934c811018ee08b"}, - {file = "SQLAlchemy-2.0.32-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:167e7497035c303ae50651b351c28dc22a40bb98fbdb8468cdc971821b1ae533"}, - {file = "SQLAlchemy-2.0.32-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b27dfb676ac02529fb6e343b3a482303f16e6bc3a4d868b73935b8792edb52d0"}, - {file = "SQLAlchemy-2.0.32-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:bf2360a5e0f7bd75fa80431bf8ebcfb920c9f885e7956c7efde89031695cafb8"}, - {file = "SQLAlchemy-2.0.32-cp310-cp310-win32.whl", hash = "sha256:306fe44e754a91cd9d600a6b070c1f2fadbb4a1a257b8781ccf33c7067fd3e4d"}, - {file = "SQLAlchemy-2.0.32-cp310-cp310-win_amd64.whl", hash = "sha256:99db65e6f3ab42e06c318f15c98f59a436f1c78179e6a6f40f529c8cc7100b22"}, - {file = "SQLAlchemy-2.0.32-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:21b053be28a8a414f2ddd401f1be8361e41032d2ef5884b2f31d31cb723e559f"}, - {file = "SQLAlchemy-2.0.32-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b178e875a7a25b5938b53b006598ee7645172fccafe1c291a706e93f48499ff5"}, - {file = "SQLAlchemy-2.0.32-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:723a40ee2cc7ea653645bd4cf024326dea2076673fc9d3d33f20f6c81db83e1d"}, - {file = "SQLAlchemy-2.0.32-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:295ff8689544f7ee7e819529633d058bd458c1fd7f7e3eebd0f9268ebc56c2a0"}, - {file = "SQLAlchemy-2.0.32-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:49496b68cd190a147118af585173ee624114dfb2e0297558c460ad7495f9dfe2"}, - {file = "SQLAlchemy-2.0.32-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:acd9b73c5c15f0ec5ce18128b1fe9157ddd0044abc373e6ecd5ba376a7e5d961"}, - {file = "SQLAlchemy-2.0.32-cp311-cp311-win32.whl", hash = "sha256:9365a3da32dabd3e69e06b972b1ffb0c89668994c7e8e75ce21d3e5e69ddef28"}, - {file = "SQLAlchemy-2.0.32-cp311-cp311-win_amd64.whl", hash = "sha256:8bd63d051f4f313b102a2af1cbc8b80f061bf78f3d5bd0843ff70b5859e27924"}, - {file = "SQLAlchemy-2.0.32-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:52fec964fba2ef46476312a03ec8c425956b05c20220a1a03703537824b5e8e1"}, - {file = "SQLAlchemy-2.0.32-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:328429aecaba2aee3d71e11f2477c14eec5990fb6d0e884107935f7fb6001632"}, - {file = 
"SQLAlchemy-2.0.32-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85a01b5599e790e76ac3fe3aa2f26e1feba56270023d6afd5550ed63c68552b3"}, - {file = "SQLAlchemy-2.0.32-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aaf04784797dcdf4c0aa952c8d234fa01974c4729db55c45732520ce12dd95b4"}, - {file = "SQLAlchemy-2.0.32-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:4488120becf9b71b3ac718f4138269a6be99a42fe023ec457896ba4f80749525"}, - {file = "SQLAlchemy-2.0.32-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:14e09e083a5796d513918a66f3d6aedbc131e39e80875afe81d98a03312889e6"}, - {file = "SQLAlchemy-2.0.32-cp39-cp39-win32.whl", hash = "sha256:0d322cc9c9b2154ba7e82f7bf25ecc7c36fbe2d82e2933b3642fc095a52cfc78"}, - {file = "SQLAlchemy-2.0.32-cp39-cp39-win_amd64.whl", hash = "sha256:7dd8583df2f98dea28b5cd53a1beac963f4f9d087888d75f22fcc93a07cf8d84"}, - {file = "SQLAlchemy-2.0.32-py3-none-any.whl", hash = "sha256:e567a8793a692451f706b363ccf3c45e056b67d90ead58c3bc9471af5d212202"}, - {file = "SQLAlchemy-2.0.32.tar.gz", hash = "sha256:c1b88cc8b02b6a5f0efb0345a03672d4c897dc7d92585176f88c67346f565ea8"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:59b8f3adb3971929a3e660337f5dacc5942c2cdb760afcabb2614ffbda9f9f72"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:37350015056a553e442ff672c2d20e6f4b6d0b2495691fa239d8aa18bb3bc908"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8318f4776c85abc3f40ab185e388bee7a6ea99e7fa3a30686580b209eaa35c08"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c245b1fbade9c35e5bd3b64270ab49ce990369018289ecfde3f9c318411aaa07"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:69f93723edbca7342624d09f6704e7126b152eaed3cdbb634cb657a54332a3c5"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f9511d8dd4a6e9271d07d150fb2f81874a3c8c95e11ff9af3a2dfc35fe42ee44"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-win32.whl", hash = "sha256:c3f3631693003d8e585d4200730616b78fafd5a01ef8b698f6967da5c605b3fa"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-win_amd64.whl", hash = "sha256:a86bfab2ef46d63300c0f06936bd6e6c0105faa11d509083ba8f2f9d237fb5b5"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fd3a55deef00f689ce931d4d1b23fa9f04c880a48ee97af488fd215cf24e2a6c"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4f5e9cd989b45b73bd359f693b935364f7e1f79486e29015813c338450aa5a71"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0ddd9db6e59c44875211bc4c7953a9f6638b937b0a88ae6d09eb46cced54eff"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2519f3a5d0517fc159afab1015e54bb81b4406c278749779be57a569d8d1bb0d"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:59b1ee96617135f6e1d6f275bbe988f419c5178016f3d41d3c0abb0c819f75bb"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:39769a115f730d683b0eb7b694db9789267bcd027326cccc3125e862eb03bfd8"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-win32.whl", hash = "sha256:66bffbad8d6271bb1cc2f9a4ea4f86f80fe5e2e3e501a5ae2a3dc6a76e604e6f"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-win_amd64.whl", hash = 
"sha256:23623166bfefe1487d81b698c423f8678e80df8b54614c2bf4b4cfcd7c711959"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dc022184d3e5cacc9579e41805a681187650e170eb2fd70e28b86192a479dcaa"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b817d41d692bf286abc181f8af476c4fbef3fd05e798777492618378448ee689"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a4e46a888b54be23d03a89be510f24a7652fe6ff660787b96cd0e57a4ebcb46d"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4ae3005ed83f5967f961fd091f2f8c5329161f69ce8480aa8168b2d7fe37f06"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:03e08af7a5f9386a43919eda9de33ffda16b44eb11f3b313e6822243770e9763"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:3dbb986bad3ed5ceaf090200eba750b5245150bd97d3e67343a3cfed06feecf7"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-win32.whl", hash = "sha256:9fe53b404f24789b5ea9003fc25b9a3988feddebd7e7b369c8fac27ad6f52f28"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-win_amd64.whl", hash = "sha256:af148a33ff0349f53512a049c6406923e4e02bf2f26c5fb285f143faf4f0e46a"}, + {file = "SQLAlchemy-2.0.36-py3-none-any.whl", hash = "sha256:fddbe92b4760c6f5d48162aef14824add991aeda8ddadb3c31d56eb15ca69f8e"}, + {file = "sqlalchemy-2.0.36.tar.gz", hash = "sha256:7f2767680b6d2398aea7082e45a774b2b0767b5c8d8ffb9c8b683088ea9b29c5"}, ] [[package]] name = "sqlalchemy" -version = "2.0.32" +version = "2.0.36" extras = ["asyncio"] requires_python = ">=3.7" summary = "Database Abstraction Library" groups = ["default"] dependencies = [ - "SQLAlchemy==2.0.32", + "SQLAlchemy==2.0.36", "greenlet!=0.4.17", ] files = [ - {file = "SQLAlchemy-2.0.32-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0c9045ecc2e4db59bfc97b20516dfdf8e41d910ac6fb667ebd3a79ea54084619"}, - {file = "SQLAlchemy-2.0.32-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1467940318e4a860afd546ef61fefb98a14d935cd6817ed07a228c7f7c62f389"}, - {file = "SQLAlchemy-2.0.32-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5954463675cb15db8d4b521f3566a017c8789222b8316b1e6934c811018ee08b"}, - {file = "SQLAlchemy-2.0.32-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:167e7497035c303ae50651b351c28dc22a40bb98fbdb8468cdc971821b1ae533"}, - {file = "SQLAlchemy-2.0.32-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b27dfb676ac02529fb6e343b3a482303f16e6bc3a4d868b73935b8792edb52d0"}, - {file = "SQLAlchemy-2.0.32-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:bf2360a5e0f7bd75fa80431bf8ebcfb920c9f885e7956c7efde89031695cafb8"}, - {file = "SQLAlchemy-2.0.32-cp310-cp310-win32.whl", hash = "sha256:306fe44e754a91cd9d600a6b070c1f2fadbb4a1a257b8781ccf33c7067fd3e4d"}, - {file = "SQLAlchemy-2.0.32-cp310-cp310-win_amd64.whl", hash = "sha256:99db65e6f3ab42e06c318f15c98f59a436f1c78179e6a6f40f529c8cc7100b22"}, - {file = "SQLAlchemy-2.0.32-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:21b053be28a8a414f2ddd401f1be8361e41032d2ef5884b2f31d31cb723e559f"}, - {file = "SQLAlchemy-2.0.32-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b178e875a7a25b5938b53b006598ee7645172fccafe1c291a706e93f48499ff5"}, - {file = "SQLAlchemy-2.0.32-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:723a40ee2cc7ea653645bd4cf024326dea2076673fc9d3d33f20f6c81db83e1d"}, - {file = 
"SQLAlchemy-2.0.32-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:295ff8689544f7ee7e819529633d058bd458c1fd7f7e3eebd0f9268ebc56c2a0"}, - {file = "SQLAlchemy-2.0.32-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:49496b68cd190a147118af585173ee624114dfb2e0297558c460ad7495f9dfe2"}, - {file = "SQLAlchemy-2.0.32-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:acd9b73c5c15f0ec5ce18128b1fe9157ddd0044abc373e6ecd5ba376a7e5d961"}, - {file = "SQLAlchemy-2.0.32-cp311-cp311-win32.whl", hash = "sha256:9365a3da32dabd3e69e06b972b1ffb0c89668994c7e8e75ce21d3e5e69ddef28"}, - {file = "SQLAlchemy-2.0.32-cp311-cp311-win_amd64.whl", hash = "sha256:8bd63d051f4f313b102a2af1cbc8b80f061bf78f3d5bd0843ff70b5859e27924"}, - {file = "SQLAlchemy-2.0.32-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:52fec964fba2ef46476312a03ec8c425956b05c20220a1a03703537824b5e8e1"}, - {file = "SQLAlchemy-2.0.32-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:328429aecaba2aee3d71e11f2477c14eec5990fb6d0e884107935f7fb6001632"}, - {file = "SQLAlchemy-2.0.32-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85a01b5599e790e76ac3fe3aa2f26e1feba56270023d6afd5550ed63c68552b3"}, - {file = "SQLAlchemy-2.0.32-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aaf04784797dcdf4c0aa952c8d234fa01974c4729db55c45732520ce12dd95b4"}, - {file = "SQLAlchemy-2.0.32-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:4488120becf9b71b3ac718f4138269a6be99a42fe023ec457896ba4f80749525"}, - {file = "SQLAlchemy-2.0.32-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:14e09e083a5796d513918a66f3d6aedbc131e39e80875afe81d98a03312889e6"}, - {file = "SQLAlchemy-2.0.32-cp39-cp39-win32.whl", hash = "sha256:0d322cc9c9b2154ba7e82f7bf25ecc7c36fbe2d82e2933b3642fc095a52cfc78"}, - {file = "SQLAlchemy-2.0.32-cp39-cp39-win_amd64.whl", hash = "sha256:7dd8583df2f98dea28b5cd53a1beac963f4f9d087888d75f22fcc93a07cf8d84"}, - {file = "SQLAlchemy-2.0.32-py3-none-any.whl", hash = "sha256:e567a8793a692451f706b363ccf3c45e056b67d90ead58c3bc9471af5d212202"}, - {file = "SQLAlchemy-2.0.32.tar.gz", hash = "sha256:c1b88cc8b02b6a5f0efb0345a03672d4c897dc7d92585176f88c67346f565ea8"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:59b8f3adb3971929a3e660337f5dacc5942c2cdb760afcabb2614ffbda9f9f72"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:37350015056a553e442ff672c2d20e6f4b6d0b2495691fa239d8aa18bb3bc908"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8318f4776c85abc3f40ab185e388bee7a6ea99e7fa3a30686580b209eaa35c08"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c245b1fbade9c35e5bd3b64270ab49ce990369018289ecfde3f9c318411aaa07"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:69f93723edbca7342624d09f6704e7126b152eaed3cdbb634cb657a54332a3c5"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f9511d8dd4a6e9271d07d150fb2f81874a3c8c95e11ff9af3a2dfc35fe42ee44"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-win32.whl", hash = "sha256:c3f3631693003d8e585d4200730616b78fafd5a01ef8b698f6967da5c605b3fa"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-win_amd64.whl", hash = "sha256:a86bfab2ef46d63300c0f06936bd6e6c0105faa11d509083ba8f2f9d237fb5b5"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:fd3a55deef00f689ce931d4d1b23fa9f04c880a48ee97af488fd215cf24e2a6c"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4f5e9cd989b45b73bd359f693b935364f7e1f79486e29015813c338450aa5a71"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0ddd9db6e59c44875211bc4c7953a9f6638b937b0a88ae6d09eb46cced54eff"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2519f3a5d0517fc159afab1015e54bb81b4406c278749779be57a569d8d1bb0d"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:59b1ee96617135f6e1d6f275bbe988f419c5178016f3d41d3c0abb0c819f75bb"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:39769a115f730d683b0eb7b694db9789267bcd027326cccc3125e862eb03bfd8"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-win32.whl", hash = "sha256:66bffbad8d6271bb1cc2f9a4ea4f86f80fe5e2e3e501a5ae2a3dc6a76e604e6f"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-win_amd64.whl", hash = "sha256:23623166bfefe1487d81b698c423f8678e80df8b54614c2bf4b4cfcd7c711959"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dc022184d3e5cacc9579e41805a681187650e170eb2fd70e28b86192a479dcaa"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b817d41d692bf286abc181f8af476c4fbef3fd05e798777492618378448ee689"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a4e46a888b54be23d03a89be510f24a7652fe6ff660787b96cd0e57a4ebcb46d"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4ae3005ed83f5967f961fd091f2f8c5329161f69ce8480aa8168b2d7fe37f06"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:03e08af7a5f9386a43919eda9de33ffda16b44eb11f3b313e6822243770e9763"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:3dbb986bad3ed5ceaf090200eba750b5245150bd97d3e67343a3cfed06feecf7"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-win32.whl", hash = "sha256:9fe53b404f24789b5ea9003fc25b9a3988feddebd7e7b369c8fac27ad6f52f28"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-win_amd64.whl", hash = "sha256:af148a33ff0349f53512a049c6406923e4e02bf2f26c5fb285f143faf4f0e46a"}, + {file = "SQLAlchemy-2.0.36-py3-none-any.whl", hash = "sha256:fddbe92b4760c6f5d48162aef14824add991aeda8ddadb3c31d56eb15ca69f8e"}, + {file = "sqlalchemy-2.0.36.tar.gz", hash = "sha256:7f2767680b6d2398aea7082e45a774b2b0767b5c8d8ffb9c8b683088ea9b29c5"}, ] [[package]] @@ -1799,8 +1865,8 @@ files = [ [[package]] name = "tiktoken" -version = "0.7.0" -requires_python = ">=3.8" +version = "0.8.0" +requires_python = ">=3.9" summary = "tiktoken is a fast BPE tokeniser for use with OpenAI's models" groups = ["default"] dependencies = [ @@ -1808,33 +1874,30 @@ dependencies = [ "requests>=2.26.0", ] files = [ - {file = "tiktoken-0.7.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:485f3cc6aba7c6b6ce388ba634fbba656d9ee27f766216f45146beb4ac18b25f"}, - {file = "tiktoken-0.7.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e54be9a2cd2f6d6ffa3517b064983fb695c9a9d8aa7d574d1ef3c3f931a99225"}, - {file = "tiktoken-0.7.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79383a6e2c654c6040e5f8506f3750db9ddd71b550c724e673203b4f6b4b4590"}, - {file = "tiktoken-0.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:5d4511c52caacf3c4981d1ae2df85908bd31853f33d30b345c8b6830763f769c"}, - {file = "tiktoken-0.7.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:13c94efacdd3de9aff824a788353aa5749c0faee1fbe3816df365ea450b82311"}, - {file = "tiktoken-0.7.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8e58c7eb29d2ab35a7a8929cbeea60216a4ccdf42efa8974d8e176d50c9a3df5"}, - {file = "tiktoken-0.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:21a20c3bd1dd3e55b91c1331bf25f4af522c525e771691adbc9a69336fa7f702"}, - {file = "tiktoken-0.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:10c7674f81e6e350fcbed7c09a65bca9356eaab27fb2dac65a1e440f2bcfe30f"}, - {file = "tiktoken-0.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:084cec29713bc9d4189a937f8a35dbdfa785bd1235a34c1124fe2323821ee93f"}, - {file = "tiktoken-0.7.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:811229fde1652fedcca7c6dfe76724d0908775b353556d8a71ed74d866f73f7b"}, - {file = "tiktoken-0.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86b6e7dc2e7ad1b3757e8a24597415bafcfb454cebf9a33a01f2e6ba2e663992"}, - {file = "tiktoken-0.7.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1063c5748be36344c7e18c7913c53e2cca116764c2080177e57d62c7ad4576d1"}, - {file = "tiktoken-0.7.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:20295d21419bfcca092644f7e2f2138ff947a6eb8cfc732c09cc7d76988d4a89"}, - {file = "tiktoken-0.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:959d993749b083acc57a317cbc643fb85c014d055b2119b739487288f4e5d1cb"}, - {file = "tiktoken-0.7.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:cabc6dc77460df44ec5b879e68692c63551ae4fae7460dd4ff17181df75f1db7"}, - {file = "tiktoken-0.7.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8d57f29171255f74c0aeacd0651e29aa47dff6f070cb9f35ebc14c82278f3b25"}, - {file = "tiktoken-0.7.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ee92776fdbb3efa02a83f968c19d4997a55c8e9ce7be821ceee04a1d1ee149c"}, - {file = "tiktoken-0.7.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e215292e99cb41fbc96988ef62ea63bb0ce1e15f2c147a61acc319f8b4cbe5bf"}, - {file = "tiktoken-0.7.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8a81bac94769cab437dd3ab0b8a4bc4e0f9cf6835bcaa88de71f39af1791727a"}, - {file = "tiktoken-0.7.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:d6d73ea93e91d5ca771256dfc9d1d29f5a554b83821a1dc0891987636e0ae226"}, - {file = "tiktoken-0.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:2bcb28ddf79ffa424f171dfeef9a4daff61a94c631ca6813f43967cb263b83b9"}, - {file = "tiktoken-0.7.0.tar.gz", hash = "sha256:1077266e949c24e0291f6c350433c6f0971365ece2b173a23bc3b9f9defef6b6"}, + {file = "tiktoken-0.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b07e33283463089c81ef1467180e3e00ab00d46c2c4bbcef0acab5f771d6695e"}, + {file = "tiktoken-0.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9269348cb650726f44dd3bbb3f9110ac19a8dcc8f54949ad3ef652ca22a38e21"}, + {file = "tiktoken-0.8.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e13f37bc4ef2d012731e93e0fef21dc3b7aea5bb9009618de9a4026844e560"}, + {file = "tiktoken-0.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f13d13c981511331eac0d01a59b5df7c0d4060a8be1e378672822213da51e0a2"}, + {file = "tiktoken-0.8.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:6b2ddbc79a22621ce8b1166afa9f9a888a664a579350dc7c09346a3b5de837d9"}, + {file = 
"tiktoken-0.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:d8c2d0e5ba6453a290b86cd65fc51fedf247e1ba170191715b049dac1f628005"}, + {file = "tiktoken-0.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d622d8011e6d6f239297efa42a2657043aaed06c4f68833550cac9e9bc723ef1"}, + {file = "tiktoken-0.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2efaf6199717b4485031b4d6edb94075e4d79177a172f38dd934d911b588d54a"}, + {file = "tiktoken-0.8.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5637e425ce1fc49cf716d88df3092048359a4b3bbb7da762840426e937ada06d"}, + {file = "tiktoken-0.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fb0e352d1dbe15aba082883058b3cce9e48d33101bdaac1eccf66424feb5b47"}, + {file = "tiktoken-0.8.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:56edfefe896c8f10aba372ab5706b9e3558e78db39dd497c940b47bf228bc419"}, + {file = "tiktoken-0.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:326624128590def898775b722ccc327e90b073714227175ea8febbc920ac0a99"}, + {file = "tiktoken-0.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7e17807445f0cf1f25771c9d86496bd8b5c376f7419912519699f3cc4dc5c12e"}, + {file = "tiktoken-0.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:886f80bd339578bbdba6ed6d0567a0d5c6cfe198d9e587ba6c447654c65b8edc"}, + {file = "tiktoken-0.8.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6adc8323016d7758d6de7313527f755b0fc6c72985b7d9291be5d96d73ecd1e1"}, + {file = "tiktoken-0.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b591fb2b30d6a72121a80be24ec7a0e9eb51c5500ddc7e4c2496516dd5e3816b"}, + {file = "tiktoken-0.8.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:845287b9798e476b4d762c3ebda5102be87ca26e5d2c9854002825d60cdb815d"}, + {file = "tiktoken-0.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:1473cfe584252dc3fa62adceb5b1c763c1874e04511b197da4e6de51d6ce5a02"}, + {file = "tiktoken-0.8.0.tar.gz", hash = "sha256:9ccbb2740f24542534369c5635cfd9b2b3c2490754a78ac8831d99f89f94eeb2"}, ] [[package]] name = "tqdm" -version = "4.66.5" +version = "4.67.1" requires_python = ">=3.7" summary = "Fast, Extensible Progress Meter" groups = ["default"] @@ -1842,8 +1905,8 @@ dependencies = [ "colorama; platform_system == \"Windows\"", ] files = [ - {file = "tqdm-4.66.5-py3-none-any.whl", hash = "sha256:90279a3770753eafc9194a0364852159802111925aa30eb3f9d85b0e805ac7cd"}, - {file = "tqdm-4.66.5.tar.gz", hash = "sha256:e1020aef2e5096702d8a025ac7d16b1577279c9d63f8375b63083e9a5f0fcbad"}, + {file = "tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2"}, + {file = "tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2"}, ] [[package]] @@ -1873,24 +1936,24 @@ files = [ [[package]] name = "tzdata" -version = "2024.1" +version = "2024.2" requires_python = ">=2" summary = "Provider of IANA time zone data" groups = ["default"] files = [ - {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, - {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, + {file = "tzdata-2024.2-py2.py3-none-any.whl", hash = "sha256:a48093786cdcde33cad18c2555e8532f34422074448fbc874186f0abd79565cd"}, + {file = "tzdata-2024.2.tar.gz", hash = "sha256:7d85cc416e9382e69095b7bdf4afd9e3880418a2413feec7069d533d6b4e31cc"}, ] [[package]] name = "urllib3" -version 
= "1.26.19" +version = "1.26.20" requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" summary = "HTTP library with thread-safe connection pooling, file post, and more." groups = ["default"] files = [ - {file = "urllib3-1.26.19-py2.py3-none-any.whl", hash = "sha256:37a0344459b199fce0e80b0d3569837ec6b6937435c5244e7fd73fa6006830f3"}, - {file = "urllib3-1.26.19.tar.gz", hash = "sha256:3e3d753a8618b86d7de333b4223005f68720bcd6a7d2bcb9fbd2229ec7c1e429"}, + {file = "urllib3-1.26.20-py2.py3-none-any.whl", hash = "sha256:0ed14ccfbf1c30a9072c7ca157e4319b70d65f623e91e7b32fadb2853431016e"}, + {file = "urllib3-1.26.20.tar.gz", hash = "sha256:40c2dc0c681e47eb8f90e7e27bf6ff7df2e677421fd46756da1161c39ca70d32"}, ] [[package]] @@ -1906,101 +1969,102 @@ files = [ [[package]] name = "wrapt" -version = "1.16.0" -requires_python = ">=3.6" +version = "1.17.0" +requires_python = ">=3.8" summary = "Module for decorators, wrappers and monkey patching." groups = ["default"] files = [ - {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, - {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, - {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, - {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, - {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, - {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, - {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, - {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, - {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, - {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, - {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, - {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, - {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, - {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, - {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, - {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, - {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, - {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, - {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, - {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, - {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, - {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, - {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, - {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, - {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, - {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, - {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, - {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, - {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, - {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, - {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, - {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, + {file = "wrapt-1.17.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2a0c23b8319848426f305f9cb0c98a6e32ee68a36264f45948ccf8e7d2b941f8"}, + {file = "wrapt-1.17.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1ca5f060e205f72bec57faae5bd817a1560fcfc4af03f414b08fa29106b7e2d"}, + {file = "wrapt-1.17.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e185ec6060e301a7e5f8461c86fb3640a7beb1a0f0208ffde7a65ec4074931df"}, + {file = "wrapt-1.17.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb90765dd91aed05b53cd7a87bd7f5c188fcd95960914bae0d32c5e7f899719d"}, + {file = "wrapt-1.17.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:879591c2b5ab0a7184258274c42a126b74a2c3d5a329df16d69f9cee07bba6ea"}, + {file = "wrapt-1.17.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fce6fee67c318fdfb7f285c29a82d84782ae2579c0e1b385b7f36c6e8074fffb"}, + {file = "wrapt-1.17.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = 
"sha256:0698d3a86f68abc894d537887b9bbf84d29bcfbc759e23f4644be27acf6da301"}, + {file = "wrapt-1.17.0-cp310-cp310-win32.whl", hash = "sha256:69d093792dc34a9c4c8a70e4973a3361c7a7578e9cd86961b2bbf38ca71e4e22"}, + {file = "wrapt-1.17.0-cp310-cp310-win_amd64.whl", hash = "sha256:f28b29dc158ca5d6ac396c8e0a2ef45c4e97bb7e65522bfc04c989e6fe814575"}, + {file = "wrapt-1.17.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:74bf625b1b4caaa7bad51d9003f8b07a468a704e0644a700e936c357c17dd45a"}, + {file = "wrapt-1.17.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f2a28eb35cf99d5f5bd12f5dd44a0f41d206db226535b37b0c60e9da162c3ed"}, + {file = "wrapt-1.17.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:81b1289e99cf4bad07c23393ab447e5e96db0ab50974a280f7954b071d41b489"}, + {file = "wrapt-1.17.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f2939cd4a2a52ca32bc0b359015718472d7f6de870760342e7ba295be9ebaf9"}, + {file = "wrapt-1.17.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6a9653131bda68a1f029c52157fd81e11f07d485df55410401f745007bd6d339"}, + {file = "wrapt-1.17.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4e4b4385363de9052dac1a67bfb535c376f3d19c238b5f36bddc95efae15e12d"}, + {file = "wrapt-1.17.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bdf62d25234290db1837875d4dceb2151e4ea7f9fff2ed41c0fde23ed542eb5b"}, + {file = "wrapt-1.17.0-cp311-cp311-win32.whl", hash = "sha256:5d8fd17635b262448ab8f99230fe4dac991af1dabdbb92f7a70a6afac8a7e346"}, + {file = "wrapt-1.17.0-cp311-cp311-win_amd64.whl", hash = "sha256:92a3d214d5e53cb1db8b015f30d544bc9d3f7179a05feb8f16df713cecc2620a"}, + {file = "wrapt-1.17.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d751300b94e35b6016d4b1e7d0e7bbc3b5e1751e2405ef908316c2a9024008a1"}, + {file = "wrapt-1.17.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7264cbb4a18dc4acfd73b63e4bcfec9c9802614572025bdd44d0721983fc1d9c"}, + {file = "wrapt-1.17.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33539c6f5b96cf0b1105a0ff4cf5db9332e773bb521cc804a90e58dc49b10578"}, + {file = "wrapt-1.17.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c30970bdee1cad6a8da2044febd824ef6dc4cc0b19e39af3085c763fdec7de33"}, + {file = "wrapt-1.17.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:bc7f729a72b16ee21795a943f85c6244971724819819a41ddbaeb691b2dd85ad"}, + {file = "wrapt-1.17.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:6ff02a91c4fc9b6a94e1c9c20f62ea06a7e375f42fe57587f004d1078ac86ca9"}, + {file = "wrapt-1.17.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2dfb7cff84e72e7bf975b06b4989477873dcf160b2fd89959c629535df53d4e0"}, + {file = "wrapt-1.17.0-cp39-cp39-win32.whl", hash = "sha256:2399408ac33ffd5b200480ee858baa58d77dd30e0dd0cab6a8a9547135f30a88"}, + {file = "wrapt-1.17.0-cp39-cp39-win_amd64.whl", hash = "sha256:4f763a29ee6a20c529496a20a7bcb16a73de27f5da6a843249c7047daf135977"}, + {file = "wrapt-1.17.0-py3-none-any.whl", hash = "sha256:d2c63b93548eda58abf5188e505ffed0229bf675f7c3090f8e36ad55b8cbc371"}, + {file = "wrapt-1.17.0.tar.gz", hash = "sha256:16187aa2317c731170a88ef35e8937ae0f533c402872c1ee5e6d079fcf320801"}, ] [[package]] name = "yarl" -version = "1.9.4" -requires_python = ">=3.7" +version = "1.18.3" +requires_python = ">=3.9" summary = "Yet another 
URL library" groups = ["default"] dependencies = [ "idna>=2.0", "multidict>=4.0", -] -files = [ - {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a8c1df72eb746f4136fe9a2e72b0c9dc1da1cbd23b5372f94b5820ff8ae30e0e"}, - {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a3a6ed1d525bfb91b3fc9b690c5a21bb52de28c018530ad85093cc488bee2dd2"}, - {file = "yarl-1.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c38c9ddb6103ceae4e4498f9c08fac9b590c5c71b0370f98714768e22ac6fa66"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9e09c9d74f4566e905a0b8fa668c58109f7624db96a2171f21747abc7524234"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8477c1ee4bd47c57d49621a062121c3023609f7a13b8a46953eb6c9716ca392"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5ff2c858f5f6a42c2a8e751100f237c5e869cbde669a724f2062d4c4ef93551"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:357495293086c5b6d34ca9616a43d329317feab7917518bc97a08f9e55648455"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54525ae423d7b7a8ee81ba189f131054defdb122cde31ff17477951464c1691c"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:801e9264d19643548651b9db361ce3287176671fb0117f96b5ac0ee1c3530d53"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e516dc8baf7b380e6c1c26792610230f37147bb754d6426462ab115a02944385"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:7d5aaac37d19b2904bb9dfe12cdb08c8443e7ba7d2852894ad448d4b8f442863"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:54beabb809ffcacbd9d28ac57b0db46e42a6e341a030293fb3185c409e626b8b"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bac8d525a8dbc2a1507ec731d2867025d11ceadcb4dd421423a5d42c56818541"}, - {file = "yarl-1.9.4-cp310-cp310-win32.whl", hash = "sha256:7855426dfbddac81896b6e533ebefc0af2f132d4a47340cee6d22cac7190022d"}, - {file = "yarl-1.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:848cd2a1df56ddbffeb375535fb62c9d1645dde33ca4d51341378b3f5954429b"}, - {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:35a2b9396879ce32754bd457d31a51ff0a9d426fd9e0e3c33394bf4b9036b099"}, - {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c7d56b293cc071e82532f70adcbd8b61909eec973ae9d2d1f9b233f3d943f2c"}, - {file = "yarl-1.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8a1c6c0be645c745a081c192e747c5de06e944a0d21245f4cf7c05e457c36e0"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b3c1ffe10069f655ea2d731808e76e0f452fc6c749bea04781daf18e6039525"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:549d19c84c55d11687ddbd47eeb348a89df9cb30e1993f1b128f4685cd0ebbf8"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7409f968456111140c1c95301cadf071bd30a81cbd7ab829169fb9e3d72eae9"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e23a6d84d9d1738dbc6e38167776107e63307dfc8ad108e580548d1f2c587f42"}, - {file = 
"yarl-1.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8b889777de69897406c9fb0b76cdf2fd0f31267861ae7501d93003d55f54fbe"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:03caa9507d3d3c83bca08650678e25364e1843b484f19986a527630ca376ecce"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e9035df8d0880b2f1c7f5031f33f69e071dfe72ee9310cfc76f7b605958ceb9"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:c0ec0ed476f77db9fb29bca17f0a8fcc7bc97ad4c6c1d8959c507decb22e8572"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:ee04010f26d5102399bd17f8df8bc38dc7ccd7701dc77f4a68c5b8d733406958"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:49a180c2e0743d5d6e0b4d1a9e5f633c62eca3f8a86ba5dd3c471060e352ca98"}, - {file = "yarl-1.9.4-cp311-cp311-win32.whl", hash = "sha256:81eb57278deb6098a5b62e88ad8281b2ba09f2f1147c4767522353eaa6260b31"}, - {file = "yarl-1.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d1d2532b340b692880261c15aee4dc94dd22ca5d61b9db9a8a361953d36410b1"}, - {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:604f31d97fa493083ea21bd9b92c419012531c4e17ea6da0f65cacdcf5d0bd27"}, - {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8a854227cf581330ffa2c4824d96e52ee621dd571078a252c25e3a3b3d94a1b1"}, - {file = "yarl-1.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ba6f52cbc7809cd8d74604cce9c14868306ae4aa0282016b641c661f981a6e91"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6327976c7c2f4ee6816eff196e25385ccc02cb81427952414a64811037bbc8b"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8397a3817d7dcdd14bb266283cd1d6fc7264a48c186b986f32e86d86d35fbac5"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e0381b4ce23ff92f8170080c97678040fc5b08da85e9e292292aba67fdac6c34"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23d32a2594cb5d565d358a92e151315d1b2268bc10f4610d098f96b147370136"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ddb2a5c08a4eaaba605340fdee8fc08e406c56617566d9643ad8bf6852778fc7"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:26a1dc6285e03f3cc9e839a2da83bcbf31dcb0d004c72d0730e755b33466c30e"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:18580f672e44ce1238b82f7fb87d727c4a131f3a9d33a5e0e82b793362bf18b4"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:29e0f83f37610f173eb7e7b5562dd71467993495e568e708d99e9d1944f561ec"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:1f23e4fe1e8794f74b6027d7cf19dc25f8b63af1483d91d595d4a07eca1fb26c"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db8e58b9d79200c76956cefd14d5c90af54416ff5353c5bfd7cbe58818e26ef0"}, - {file = "yarl-1.9.4-cp39-cp39-win32.whl", hash = "sha256:c7224cab95645c7ab53791022ae77a4509472613e839dab722a72abe5a684575"}, - {file = "yarl-1.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:824d6c50492add5da9374875ce72db7a0733b29c2394890aef23d533106e2b15"}, - {file = "yarl-1.9.4-py3-none-any.whl", hash = "sha256:928cecb0ef9d5a7946eb6ff58417ad2fe9375762382f1bf5c55e61645f2c43ad"}, - {file = 
"yarl-1.9.4.tar.gz", hash = "sha256:566db86717cf8080b99b58b083b773a908ae40f06681e87e589a976faf8246bf"}, + "propcache>=0.2.0", +] +files = [ + {file = "yarl-1.18.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7df647e8edd71f000a5208fe6ff8c382a1de8edfbccdbbfe649d263de07d8c34"}, + {file = "yarl-1.18.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c69697d3adff5aa4f874b19c0e4ed65180ceed6318ec856ebc423aa5850d84f7"}, + {file = "yarl-1.18.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:602d98f2c2d929f8e697ed274fbadc09902c4025c5a9963bf4e9edfc3ab6f7ed"}, + {file = "yarl-1.18.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c654d5207c78e0bd6d749f6dae1dcbbfde3403ad3a4b11f3c5544d9906969dde"}, + {file = "yarl-1.18.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5094d9206c64181d0f6e76ebd8fb2f8fe274950a63890ee9e0ebfd58bf9d787b"}, + {file = "yarl-1.18.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35098b24e0327fc4ebdc8ffe336cee0a87a700c24ffed13161af80124b7dc8e5"}, + {file = "yarl-1.18.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3236da9272872443f81fedc389bace88408f64f89f75d1bdb2256069a8730ccc"}, + {file = "yarl-1.18.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2c08cc9b16f4f4bc522771d96734c7901e7ebef70c6c5c35dd0f10845270bcd"}, + {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:80316a8bd5109320d38eef8833ccf5f89608c9107d02d2a7f985f98ed6876990"}, + {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:c1e1cc06da1491e6734f0ea1e6294ce00792193c463350626571c287c9a704db"}, + {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fea09ca13323376a2fdfb353a5fa2e59f90cd18d7ca4eaa1fd31f0a8b4f91e62"}, + {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:e3b9fd71836999aad54084906f8663dffcd2a7fb5cdafd6c37713b2e72be1760"}, + {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:757e81cae69244257d125ff31663249b3013b5dc0a8520d73694aed497fb195b"}, + {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b1771de9944d875f1b98a745bc547e684b863abf8f8287da8466cf470ef52690"}, + {file = "yarl-1.18.3-cp310-cp310-win32.whl", hash = "sha256:8874027a53e3aea659a6d62751800cf6e63314c160fd607489ba5c2edd753cf6"}, + {file = "yarl-1.18.3-cp310-cp310-win_amd64.whl", hash = "sha256:93b2e109287f93db79210f86deb6b9bbb81ac32fc97236b16f7433db7fc437d8"}, + {file = "yarl-1.18.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8503ad47387b8ebd39cbbbdf0bf113e17330ffd339ba1144074da24c545f0069"}, + {file = "yarl-1.18.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:02ddb6756f8f4517a2d5e99d8b2f272488e18dd0bfbc802f31c16c6c20f22193"}, + {file = "yarl-1.18.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:67a283dd2882ac98cc6318384f565bffc751ab564605959df4752d42483ad889"}, + {file = "yarl-1.18.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d980e0325b6eddc81331d3f4551e2a333999fb176fd153e075c6d1c2530aa8a8"}, + {file = "yarl-1.18.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b643562c12680b01e17239be267bc306bbc6aac1f34f6444d1bded0c5ce438ca"}, + {file = "yarl-1.18.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c017a3b6df3a1bd45b9fa49a0f54005e53fbcad16633870104b66fa1a30a29d8"}, + {file = 
"yarl-1.18.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75674776d96d7b851b6498f17824ba17849d790a44d282929c42dbb77d4f17ae"}, + {file = "yarl-1.18.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ccaa3a4b521b780a7e771cc336a2dba389a0861592bbce09a476190bb0c8b4b3"}, + {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2d06d3005e668744e11ed80812e61efd77d70bb7f03e33c1598c301eea20efbb"}, + {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:9d41beda9dc97ca9ab0b9888cb71f7539124bc05df02c0cff6e5acc5a19dcc6e"}, + {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ba23302c0c61a9999784e73809427c9dbedd79f66a13d84ad1b1943802eaaf59"}, + {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:6748dbf9bfa5ba1afcc7556b71cda0d7ce5f24768043a02a58846e4a443d808d"}, + {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:0b0cad37311123211dc91eadcb322ef4d4a66008d3e1bdc404808992260e1a0e"}, + {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0fb2171a4486bb075316ee754c6d8382ea6eb8b399d4ec62fde2b591f879778a"}, + {file = "yarl-1.18.3-cp311-cp311-win32.whl", hash = "sha256:61b1a825a13bef4a5f10b1885245377d3cd0bf87cba068e1d9a88c2ae36880e1"}, + {file = "yarl-1.18.3-cp311-cp311-win_amd64.whl", hash = "sha256:b9d60031cf568c627d028239693fd718025719c02c9f55df0a53e587aab951b5"}, + {file = "yarl-1.18.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:61e5e68cb65ac8f547f6b5ef933f510134a6bf31bb178be428994b0cb46c2a04"}, + {file = "yarl-1.18.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fe57328fbc1bfd0bd0514470ac692630f3901c0ee39052ae47acd1d90a436719"}, + {file = "yarl-1.18.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a440a2a624683108a1b454705ecd7afc1c3438a08e890a1513d468671d90a04e"}, + {file = "yarl-1.18.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09c7907c8548bcd6ab860e5f513e727c53b4a714f459b084f6580b49fa1b9cee"}, + {file = "yarl-1.18.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b4f6450109834af88cb4cc5ecddfc5380ebb9c228695afc11915a0bf82116789"}, + {file = "yarl-1.18.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9ca04806f3be0ac6d558fffc2fdf8fcef767e0489d2684a21912cc4ed0cd1b8"}, + {file = "yarl-1.18.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77a6e85b90a7641d2e07184df5557132a337f136250caafc9ccaa4a2a998ca2c"}, + {file = "yarl-1.18.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6333c5a377c8e2f5fae35e7b8f145c617b02c939d04110c76f29ee3676b5f9a5"}, + {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0b3c92fa08759dbf12b3a59579a4096ba9af8dd344d9a813fc7f5070d86bbab1"}, + {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:4ac515b860c36becb81bb84b667466885096b5fc85596948548b667da3bf9f24"}, + {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:045b8482ce9483ada4f3f23b3774f4e1bf4f23a2d5c912ed5170f68efb053318"}, + {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:a4bb030cf46a434ec0225bddbebd4b89e6471814ca851abb8696170adb163985"}, + {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:54d6921f07555713b9300bee9c50fb46e57e2e639027089b1d795ecd9f7fa910"}, + {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = 
"sha256:1d407181cfa6e70077df3377938c08012d18893f9f20e92f7d2f314a437c30b1"}, + {file = "yarl-1.18.3-cp39-cp39-win32.whl", hash = "sha256:ac36703a585e0929b032fbaab0707b75dc12703766d0b53486eabd5139ebadd5"}, + {file = "yarl-1.18.3-cp39-cp39-win_amd64.whl", hash = "sha256:ba87babd629f8af77f557b61e49e7c7cac36f22f871156b91e10a6e9d4f829e9"}, + {file = "yarl-1.18.3-py3-none-any.whl", hash = "sha256:b57f4f58099328dfb26c6a771d09fb20dbbae81d20cfb66141251ea063bd101b"}, + {file = "yarl-1.18.3.tar.gz", hash = "sha256:ac1801c45cbf77b6c99242eeff4fffb5e4e73a800b5c4ad4fc0be5def634d2e1"}, ] diff --git a/unstract/core/pyproject.toml b/unstract/core/pyproject.toml index 24db7055d..251779a34 100644 --- a/unstract/core/pyproject.toml +++ b/unstract/core/pyproject.toml @@ -14,7 +14,7 @@ dependencies = [ "botocore~=1.34.0", "llama-index==0.10.58", "llama-index-llms-azure-openai==0.1.10", - "redis~=5.0.1", + "redis~=5.2.1", "requests==2.31.0", "kombu==5.3.7", ] diff --git a/unstract/tool-registry/pyproject.toml b/unstract/tool-registry/pyproject.toml index 2557360cc..b6d812286 100644 --- a/unstract/tool-registry/pyproject.toml +++ b/unstract/tool-registry/pyproject.toml @@ -13,7 +13,7 @@ dependencies = [ "docker~=6.1.3", "jsonschema~=4.18.2", "PyYAML~=6.0.1", - "unstract-sdk~=0.54.0rc6", + "unstract-sdk~=0.54.0rc8", # ! IMPORTANT! # Local dependencies usually need to be added as: # https://pdm-project.org/latest/usage/dependency/#local-dependencies