diff --git a/src/datafactory/HISTORY.rst b/src/datafactory/HISTORY.rst
index f652bef060d..39916defc25 100644
--- a/src/datafactory/HISTORY.rst
+++ b/src/datafactory/HISTORY.rst
@@ -2,6 +2,9 @@ Release History
 ===============
+1.0.3
+* Support new features in ADF
+
 1.0.2
 * Support new features in ADF
 
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_data_factory_management_client.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_data_factory_management_client.py
index 9d12f544c41..127c83a86a1 100644
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_data_factory_management_client.py
+++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_data_factory_management_client.py
@@ -8,6 +8,7 @@
 from copy import deepcopy
 from typing import Any, TYPE_CHECKING
+from typing_extensions import Self
 
 from azure.core.pipeline import policies
 from azure.core.rest import HttpRequest, HttpResponse
@@ -230,7 +231,7 @@ def _send_request(self, request: HttpRequest, *, stream: bool = False, **kwargs:
     def close(self) -> None:
         self._client.close()
 
-    def __enter__(self) -> "DataFactoryManagementClient":
+    def __enter__(self) -> Self:
         self._client.__enter__()
         return self
 
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_serialization.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_serialization.py
index 2f781d74082..8139854b97b 100644
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_serialization.py
+++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_serialization.py
@@ -144,6 +144,8 @@ def _json_attemp(data):
                 # context otherwise.
                 _LOGGER.critical("Wasn't XML not JSON, failing")
                 raise DeserializationError("XML is invalid") from err
+        elif content_type.startswith("text/"):
+            return data_as_str
         raise DeserializationError("Cannot deserialize content-type: {}".format(content_type))
 
     @classmethod
@@ -1441,7 +1443,7 @@ def _deserialize(self, target_obj, data):
         elif isinstance(response, type) and issubclass(response, Enum):
             return self.deserialize_enum(data, response)
 
-        if data is None:
+        if data is None or data is CoreNull:
             return data
         try:
             attributes = response._attribute_map  # type: ignore
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_version.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_version.py
index 142a0420b39..b77ac924608 100644
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_version.py
+++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_version.py
@@ -6,4 +6,4 @@
 # Changes may cause incorrect behavior and will be lost if the code is regenerated.
 # --------------------------------------------------------------------------
 
-VERSION = "8.0.0"
+VERSION = "9.0.0"
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_data_factory_management_client.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_data_factory_management_client.py
index ec9b4794fba..597d4988e2b 100644
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_data_factory_management_client.py
+++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_data_factory_management_client.py
@@ -8,6 +8,7 @@
 from copy import deepcopy
 from typing import Any, Awaitable, TYPE_CHECKING
+from typing_extensions import Self
 
 from azure.core.pipeline import policies
 from azure.core.rest import AsyncHttpResponse, HttpRequest
@@ -233,7 +234,7 @@ def _send_request(
     async def close(self) -> None:
         await self._client.close()
 
-    async def __aenter__(self) -> "DataFactoryManagementClient":
+    async def __aenter__(self) -> Self:
         await self._client.__aenter__()
         return self
 
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_activity_runs_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_activity_runs_operations.py
index a45e6ed5bca..ccfa35e15c7 100644
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_activity_runs_operations.py
+++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_activity_runs_operations.py
@@ -19,14 +19,12 @@
     map_error,
 )
 from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse
-from azure.core.rest import HttpRequest
+from azure.core.rest import AsyncHttpResponse, HttpRequest
 from azure.core.tracing.decorator_async import distributed_trace_async
 from azure.core.utils import case_insensitive_dict
 from azure.mgmt.core.exceptions import ARMErrorFormat
 
 from ...
import models as _models -from ..._vendor import _convert_request from ...operations._activity_runs_operations import build_query_by_pipeline_run_request if sys.version_info >= (3, 9): @@ -173,7 +171,6 @@ async def query_by_pipeline_run( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -187,7 +184,7 @@ async def query_by_pipeline_run( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("ActivityRunsQueryResponse", pipeline_response) + deserialized = self._deserialize("ActivityRunsQueryResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_change_data_capture_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_change_data_capture_operations.py index bf52dd3a28e..8b8fabbfbe2 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_change_data_capture_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_change_data_capture_operations.py @@ -21,15 +21,13 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... 
import models as _models -from ..._vendor import _convert_request from ...operations._change_data_capture_operations import ( build_create_or_update_request, build_delete_request, @@ -108,7 +106,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -124,7 +121,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -287,7 +283,6 @@ async def create_or_update( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -301,7 +296,7 @@ async def create_or_update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("ChangeDataCaptureResource", pipeline_response) + deserialized = self._deserialize("ChangeDataCaptureResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -357,7 +352,6 @@ async def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -371,7 +365,7 @@ async def get( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("ChangeDataCaptureResource", pipeline_response) + deserialized = self._deserialize("ChangeDataCaptureResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -417,7 +411,6 @@ async def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -473,7 +466,6 @@ async def start( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -529,7 +521,6 @@ async def stop( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -585,7 +576,6 @@ async def status( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -599,7 +589,7 @@ async def status( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("str", pipeline_response) + deserialized = self._deserialize("str", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_credential_operations_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_credential_operations_operations.py index 345cf3b20aa..894ca06edda 100644 --- 
a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_credential_operations_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_credential_operations_operations.py @@ -21,15 +21,13 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models -from ..._vendor import _convert_request from ...operations._credential_operations_operations import ( build_create_or_update_request, build_delete_request, @@ -104,7 +102,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -120,7 +117,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -279,7 +275,6 @@ async def create_or_update( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -293,7 +288,7 @@ async def create_or_update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("CredentialResource", pipeline_response) + deserialized = self._deserialize("CredentialResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -349,7 +344,6 @@ async def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -365,7 +359,7 @@ async def get( deserialized = None if response.status_code == 200: - deserialized = self._deserialize("CredentialResource", pipeline_response) + deserialized = self._deserialize("CredentialResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -411,7 +405,6 @@ async def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_data_flow_debug_session_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_data_flow_debug_session_operations.py index f3e4c2ccbc7..7ac3b03f24e 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_data_flow_debug_session_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_data_flow_debug_session_operations.py @@ -8,7 +8,7 @@ # -------------------------------------------------------------------------- from io import IOBase import sys -from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload +from typing import Any, 
AsyncIterable, AsyncIterator, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList @@ -18,12 +18,13 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict @@ -31,7 +32,6 @@ from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... import models as _models -from ..._vendor import _convert_request from ...operations._data_flow_debug_session_operations import ( build_add_data_flow_request, build_create_request, @@ -73,7 +73,7 @@ async def _create_initial( factory_name: str, request: Union[_models.CreateDataFlowDebugSessionRequest, IO[bytes]], **kwargs: Any - ) -> Optional[_models.CreateDataFlowDebugSessionResponse]: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -87,7 +87,7 @@ async def _create_initial( api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.CreateDataFlowDebugSessionResponse]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -108,10 +108,10 @@ async def _create_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -119,17 +119,19 @@ async def _create_initial( response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("CreateDataFlowDebugSessionResponse", pipeline_response) - if response.status_code == 202: response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -235,10 +237,11 @@ async def begin_create( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("CreateDataFlowDebugSessionResponse", pipeline_response) + deserialized = 
self._deserialize("CreateDataFlowDebugSessionResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -301,7 +304,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -317,7 +319,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -453,7 +454,6 @@ async def add_data_flow( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -467,7 +467,7 @@ async def add_data_flow( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("AddDataFlowToDebugSessionResponse", pipeline_response) + deserialized = self._deserialize("AddDataFlowToDebugSessionResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -581,7 +581,6 @@ async def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -604,7 +603,7 @@ async def _execute_command_initial( factory_name: str, request: Union[_models.DataFlowDebugCommandRequest, IO[bytes]], **kwargs: Any - ) -> Optional[_models.DataFlowDebugCommandResponse]: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -618,7 +617,7 @@ async def _execute_command_initial( api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.DataFlowDebugCommandResponse]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -639,10 +638,10 @@ async def _execute_command_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -650,17 +649,19 @@ async def _execute_command_initial( response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("DataFlowDebugCommandResponse", pipeline_response) - if response.status_code == 202: response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = 
response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -766,10 +767,11 @@ async def begin_execute_command( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("DataFlowDebugCommandResponse", pipeline_response) + deserialized = self._deserialize("DataFlowDebugCommandResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_data_flows_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_data_flows_operations.py index d8c02e74412..e884f876bb4 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_data_flows_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_data_flows_operations.py @@ -21,15 +21,13 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models -from ..._vendor import _convert_request from ...operations._data_flows_operations import ( build_create_or_update_request, build_delete_request, @@ -194,7 +192,6 @@ async def create_or_update( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -208,7 +205,7 @@ async def create_or_update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("DataFlowResource", pipeline_response) + deserialized = self._deserialize("DataFlowResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -264,7 +261,6 @@ async def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -278,7 +274,7 @@ async def get( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("DataFlowResource", pipeline_response) + deserialized = self._deserialize("DataFlowResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -324,7 +320,6 @@ async def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -381,7 +376,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -397,7 +391,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", 
urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_datasets_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_datasets_operations.py index 2bd7d817a9e..9877c258645 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_datasets_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_datasets_operations.py @@ -21,15 +21,13 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models -from ..._vendor import _convert_request from ...operations._datasets_operations import ( build_create_or_update_request, build_delete_request, @@ -103,7 +101,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -119,7 +116,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -278,7 +274,6 @@ async def create_or_update( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -292,7 +287,7 @@ async def create_or_update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("DatasetResource", pipeline_response) + deserialized = self._deserialize("DatasetResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -348,7 +343,6 @@ async def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -364,7 +358,7 @@ async def get( deserialized = None if response.status_code == 200: - deserialized = self._deserialize("DatasetResource", pipeline_response) + deserialized = self._deserialize("DatasetResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -410,7 +404,6 @@ async def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_exposure_control_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_exposure_control_operations.py index fc55ec9c003..9378b4cbc2d 100644 --- 
a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_exposure_control_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_exposure_control_operations.py @@ -19,14 +19,12 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models -from ..._vendor import _convert_request from ...operations._exposure_control_operations import ( build_get_feature_value_by_factory_request, build_get_feature_value_request, @@ -158,7 +156,6 @@ async def get_feature_value( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -172,7 +169,7 @@ async def get_feature_value( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("ExposureControlResponse", pipeline_response) + deserialized = self._deserialize("ExposureControlResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -287,7 +284,6 @@ async def get_feature_value_by_factory( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -301,7 +297,7 @@ async def get_feature_value_by_factory( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("ExposureControlResponse", pipeline_response) + deserialized = self._deserialize("ExposureControlResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -419,7 +415,6 @@ async def query_feature_values_by_factory( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -433,7 +428,7 @@ async def query_feature_values_by_factory( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("ExposureControlBatchResponse", pipeline_response) + deserialized = self._deserialize("ExposureControlBatchResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_factories_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_factories_operations.py index 141ac5bfd89..909d7e58b3f 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_factories_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_factories_operations.py @@ -21,15 +21,13 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, 
HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models -from ..._vendor import _convert_request from ...operations._factories_operations import ( build_configure_factory_repo_request, build_create_or_update_request, @@ -100,7 +98,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -116,7 +113,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -234,7 +230,6 @@ async def configure_factory_repo( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -248,7 +243,7 @@ async def configure_factory_repo( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("Factory", pipeline_response) + deserialized = self._deserialize("Factory", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -289,7 +284,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -305,7 +299,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -454,7 +447,6 @@ async def create_or_update( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -468,7 +460,7 @@ async def create_or_update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("Factory", pipeline_response) + deserialized = self._deserialize("Factory", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -583,7 +575,6 @@ async def update( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -597,7 +588,7 @@ async def update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("Factory", pipeline_response) + deserialized = self._deserialize("Factory", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -645,7 +636,6 @@ async def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -661,7 +651,7 @@ async def get( deserialized = None if response.status_code == 200: - deserialized = 
self._deserialize("Factory", pipeline_response) + deserialized = self._deserialize("Factory", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -704,7 +694,6 @@ async def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -829,7 +818,6 @@ async def get_git_hub_access_token( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -843,7 +831,7 @@ async def get_git_hub_access_token( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("GitHubAccessTokenResponse", pipeline_response) + deserialized = self._deserialize("GitHubAccessTokenResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -957,7 +945,6 @@ async def get_data_plane_access( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -971,7 +958,7 @@ async def get_data_plane_access( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("AccessPolicyResponse", pipeline_response) + deserialized = self._deserialize("AccessPolicyResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_global_parameters_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_global_parameters_operations.py index 3ba8eb0e8fc..e65591c4497 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_global_parameters_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_global_parameters_operations.py @@ -21,15 +21,13 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... 
import models as _models -from ..._vendor import _convert_request from ...operations._global_parameters_operations import ( build_create_or_update_request, build_delete_request, @@ -105,7 +103,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -121,7 +118,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -189,7 +185,6 @@ async def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -203,7 +198,7 @@ async def get( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("GlobalParameterResource", pipeline_response) + deserialized = self._deserialize("GlobalParameterResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -327,7 +322,6 @@ async def create_or_update( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -341,7 +335,7 @@ async def create_or_update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("GlobalParameterResource", pipeline_response) + deserialized = self._deserialize("GlobalParameterResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -387,7 +381,6 @@ async def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_integration_runtime_nodes_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_integration_runtime_nodes_operations.py index ad6d4e6ce27..491a0c27900 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_integration_runtime_nodes_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_integration_runtime_nodes_operations.py @@ -19,14 +19,12 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... 
import models as _models -from ..._vendor import _convert_request from ...operations._integration_runtime_nodes_operations import ( build_delete_request, build_get_ip_address_request, @@ -103,7 +101,6 @@ async def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -117,7 +114,7 @@ async def get( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("SelfHostedIntegrationRuntimeNode", pipeline_response) + deserialized = self._deserialize("SelfHostedIntegrationRuntimeNode", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -166,7 +163,6 @@ async def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -315,7 +311,6 @@ async def update( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -329,7 +324,7 @@ async def update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("SelfHostedIntegrationRuntimeNode", pipeline_response) + deserialized = self._deserialize("SelfHostedIntegrationRuntimeNode", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -378,7 +373,6 @@ async def get_ip_address( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -392,7 +386,7 @@ async def get_ip_address( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("IntegrationRuntimeNodeIpAddress", pipeline_response) + deserialized = self._deserialize("IntegrationRuntimeNodeIpAddress", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_integration_runtime_object_metadata_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_integration_runtime_object_metadata_operations.py index 398280e5e34..12eae02fd3a 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_integration_runtime_object_metadata_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_integration_runtime_object_metadata_operations.py @@ -8,7 +8,7 @@ # -------------------------------------------------------------------------- from io import IOBase import sys -from typing import Any, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload +from typing import Any, AsyncIterator, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload from azure.core.exceptions import ( ClientAuthenticationError, @@ -16,19 +16,19 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import 
AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... import models as _models -from ..._vendor import _convert_request from ...operations._integration_runtime_object_metadata_operations import build_get_request, build_refresh_request if sys.version_info >= (3, 9): @@ -60,7 +60,7 @@ def __init__(self, *args, **kwargs) -> None: async def _refresh_initial( self, resource_group_name: str, factory_name: str, integration_runtime_name: str, **kwargs: Any - ) -> Optional[_models.SsisObjectMetadataStatusResponse]: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -73,7 +73,7 @@ async def _refresh_initial( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[Optional[_models.SsisObjectMetadataStatusResponse]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) _request = build_refresh_request( resource_group_name=resource_group_name, @@ -84,10 +84,10 @@ async def _refresh_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -95,12 +95,14 @@ async def _refresh_initial( response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize("SsisObjectMetadataStatusResponse", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -144,10 +146,11 @@ async def begin_refresh( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("SsisObjectMetadataStatusResponse", pipeline_response) + deserialized = self._deserialize("SsisObjectMetadataStatusResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -295,7 +298,6 @@ async def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -309,7 +311,7 @@ async def get( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = 
self._deserialize("SsisObjectMetadataListResponse", pipeline_response) + deserialized = self._deserialize("SsisObjectMetadataListResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_integration_runtimes_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_integration_runtimes_operations.py index ed69ede06e6..1c5791abd67 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_integration_runtimes_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_integration_runtimes_operations.py @@ -8,7 +8,7 @@ # -------------------------------------------------------------------------- from io import IOBase import sys -from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload +from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList @@ -18,12 +18,13 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict @@ -31,7 +32,6 @@ from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... 
import models as _models -from ..._vendor import _convert_request from ...operations._integration_runtimes_operations import ( build_create_linked_integration_runtime_request, build_create_or_update_request, @@ -120,7 +120,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -136,7 +135,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -299,7 +297,6 @@ async def create_or_update( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -313,7 +310,7 @@ async def create_or_update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("IntegrationRuntimeResource", pipeline_response) + deserialized = self._deserialize("IntegrationRuntimeResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -369,7 +366,6 @@ async def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -385,7 +381,7 @@ async def get( deserialized = None if response.status_code == 200: - deserialized = self._deserialize("IntegrationRuntimeResource", pipeline_response) + deserialized = self._deserialize("IntegrationRuntimeResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -513,7 +509,6 @@ async def update( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -527,7 +522,7 @@ async def update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("IntegrationRuntimeResource", pipeline_response) + deserialized = self._deserialize("IntegrationRuntimeResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -573,7 +568,6 @@ async def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -629,7 +623,6 @@ async def get_status( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -643,7 +636,7 @@ async def get_status( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("IntegrationRuntimeStatusResponse", pipeline_response) + deserialized = self._deserialize("IntegrationRuntimeStatusResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -691,7 +684,6 @@ async def list_outbound_network_dependencies_endpoints( # pylint: disable=name- headers=_headers, params=_params, ) - _request = 
_convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -706,7 +698,7 @@ async def list_outbound_network_dependencies_endpoints( # pylint: disable=name- raise HttpResponseError(response=response, error_format=ARMErrorFormat) deserialized = self._deserialize( - "IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse", pipeline_response + "IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse", pipeline_response.http_response ) if cls: @@ -754,7 +746,6 @@ async def get_connection_info( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -768,7 +759,7 @@ async def get_connection_info( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("IntegrationRuntimeConnectionInfo", pipeline_response) + deserialized = self._deserialize("IntegrationRuntimeConnectionInfo", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -897,7 +888,6 @@ async def regenerate_auth_key( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -911,7 +901,7 @@ async def regenerate_auth_key( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("IntegrationRuntimeAuthKeys", pipeline_response) + deserialized = self._deserialize("IntegrationRuntimeAuthKeys", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -957,7 +947,6 @@ async def list_auth_keys( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -971,7 +960,7 @@ async def list_auth_keys( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("IntegrationRuntimeAuthKeys", pipeline_response) + deserialized = self._deserialize("IntegrationRuntimeAuthKeys", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -980,7 +969,7 @@ async def list_auth_keys( async def _start_initial( self, resource_group_name: str, factory_name: str, integration_runtime_name: str, **kwargs: Any - ) -> Optional[_models.IntegrationRuntimeStatusResponse]: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -993,7 +982,7 @@ async def _start_initial( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[Optional[_models.IntegrationRuntimeStatusResponse]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) _request = build_start_request( resource_group_name=resource_group_name, @@ -1004,10 +993,10 @@ async def _start_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True 
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1015,12 +1004,14 @@ async def _start_initial( response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize("IntegrationRuntimeStatusResponse", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1064,10 +1055,11 @@ async def begin_start( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("IntegrationRuntimeStatusResponse", pipeline_response) + deserialized = self._deserialize("IntegrationRuntimeStatusResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -1089,9 +1081,9 @@ def get_long_running_output(pipeline_response): self._client, raw_result, get_long_running_output, polling_method # type: ignore ) - async def _stop_initial( # pylint: disable=inconsistent-return-statements + async def _stop_initial( self, resource_group_name: str, factory_name: str, integration_runtime_name: str, **kwargs: Any - ) -> None: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1104,7 +1096,7 @@ async def _stop_initial( # pylint: disable=inconsistent-return-statements _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) _request = build_stop_request( resource_group_name=resource_group_name, @@ -1115,10 +1107,10 @@ async def _stop_initial( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1126,11 +1118,19 @@ async def _stop_initial( # pylint: disable=inconsistent-return-statements response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) # type: ignore + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore @distributed_trace_async async def 
begin_stop( @@ -1157,7 +1157,7 @@ async def begin_stop( lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._stop_initial( # type: ignore + raw_result = await self._stop_initial( resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, @@ -1167,6 +1167,7 @@ async def begin_stop( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -1230,7 +1231,6 @@ async def sync_credentials( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1287,7 +1287,6 @@ async def get_monitoring_data( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1301,7 +1300,7 @@ async def get_monitoring_data( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("IntegrationRuntimeMonitoringData", pipeline_response) + deserialized = self._deserialize("IntegrationRuntimeMonitoringData", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1347,7 +1346,6 @@ async def upgrade( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1488,7 +1486,6 @@ async def remove_links( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1628,7 +1625,6 @@ async def create_linked_integration_runtime( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1642,7 +1638,7 @@ async def create_linked_integration_runtime( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("IntegrationRuntimeStatusResponse", pipeline_response) + deserialized = self._deserialize("IntegrationRuntimeStatusResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_linked_services_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_linked_services_operations.py index a31e4b4db36..6f20219541a 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_linked_services_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_linked_services_operations.py @@ -21,15 +21,13 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import 
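The regenerated long-running operations above no longer deserialize the initial response inside `_start_initial`/`_stop_initial`; they stream the raw body back as `AsyncIterator[bytes]` and let the `begin_*` wrapper read it (`await raw_result.http_response.read()`) before polling, with `get_long_running_output` deserializing from `pipeline_response.http_response`. The same shape recurs in the trigger operations later in this patch. A minimal sketch of that initial-call shape, assuming an existing `AsyncPipelineClient` (`client`), an already-built `azure.core.rest.HttpRequest`, and an `error_map` like the ones constructed in these methods; the helper name is illustrative and not part of the patch:

# Minimal sketch of the streaming "_initial" pattern shown above (illustrative names).
from typing import Any, AsyncIterator

from azure.core.exceptions import (
    HttpResponseError,
    StreamClosedError,
    StreamConsumedError,
    map_error,
)
from azure.core.pipeline import PipelineResponse
from azure.core.rest import HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat


async def _operation_initial(client, request: HttpRequest, error_map, **kwargs: Any) -> AsyncIterator[bytes]:
    decompress = kwargs.pop("decompress", True)
    pipeline_response: PipelineResponse = await client._pipeline.run(  # pylint: disable=protected-access
        request, stream=True, **kwargs
    )
    response = pipeline_response.http_response

    if response.status_code not in [200, 202]:
        try:
            await response.read()  # load the body in memory so the socket is released
        except (StreamConsumedError, StreamClosedError):
            pass
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    # The undeserialized body is handed back as a byte stream; the begin_* wrapper
    # reads it (await raw_result.http_response.read()) before starting to poll.
    return response.stream_download(client._pipeline, decompress=decompress)  # pylint: disable=protected-access

On failure the body is read first so the connection can be closed cleanly before `map_error` raises; on success the poller decides when to consume the stream.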
distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models -from ..._vendor import _convert_request from ...operations._linked_services_operations import ( build_create_or_update_request, build_delete_request, @@ -105,7 +103,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -121,7 +118,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -283,7 +279,6 @@ async def create_or_update( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -297,7 +292,7 @@ async def create_or_update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("LinkedServiceResource", pipeline_response) + deserialized = self._deserialize("LinkedServiceResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -353,7 +348,6 @@ async def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -369,7 +363,7 @@ async def get( deserialized = None if response.status_code == 200: - deserialized = self._deserialize("LinkedServiceResource", pipeline_response) + deserialized = self._deserialize("LinkedServiceResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -415,7 +409,6 @@ async def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_managed_private_endpoints_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_managed_private_endpoints_operations.py index f09e7b1641e..da3e687c4de 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_managed_private_endpoints_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_managed_private_endpoints_operations.py @@ -21,15 +21,13 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... 
import models as _models -from ..._vendor import _convert_request from ...operations._managed_private_endpoints_operations import ( build_create_or_update_request, build_delete_request, @@ -108,7 +106,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -124,7 +121,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -297,7 +293,6 @@ async def create_or_update( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -311,7 +306,7 @@ async def create_or_update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("ManagedPrivateEndpointResource", pipeline_response) + deserialized = self._deserialize("ManagedPrivateEndpointResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -371,7 +366,6 @@ async def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -385,7 +379,7 @@ async def get( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("ManagedPrivateEndpointResource", pipeline_response) + deserialized = self._deserialize("ManagedPrivateEndpointResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -439,7 +433,6 @@ async def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_managed_virtual_networks_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_managed_virtual_networks_operations.py index 4a4f128d4ba..d3932703922 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_managed_virtual_networks_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_managed_virtual_networks_operations.py @@ -21,15 +21,13 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... 
import models as _models -from ..._vendor import _convert_request from ...operations._managed_virtual_networks_operations import ( build_create_or_update_request, build_get_request, @@ -104,7 +102,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -120,7 +117,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -283,7 +279,6 @@ async def create_or_update( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -297,7 +292,7 @@ async def create_or_update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("ManagedVirtualNetworkResource", pipeline_response) + deserialized = self._deserialize("ManagedVirtualNetworkResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -353,7 +348,6 @@ async def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -367,7 +361,7 @@ async def get( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("ManagedVirtualNetworkResource", pipeline_response) + deserialized = self._deserialize("ManagedVirtualNetworkResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_operations.py index a848d37430c..d5b7a449d08 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_operations.py @@ -20,14 +20,12 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... 
import models as _models -from ..._vendor import _convert_request from ...operations._operations import build_list_request if sys.version_info >= (3, 9): @@ -87,7 +85,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -103,7 +100,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_pipeline_runs_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_pipeline_runs_operations.py index c6ae1b85810..136dd4e28d0 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_pipeline_runs_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_pipeline_runs_operations.py @@ -19,14 +19,12 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models -from ..._vendor import _convert_request from ...operations._pipeline_runs_operations import ( build_cancel_request, build_get_request, @@ -167,7 +165,6 @@ async def query_by_factory( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -181,7 +178,7 @@ async def query_by_factory( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("PipelineRunsQueryResponse", pipeline_response) + deserialized = self._deserialize("PipelineRunsQueryResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -225,7 +222,6 @@ async def get(self, resource_group_name: str, factory_name: str, run_id: str, ** headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -239,7 +235,7 @@ async def get(self, resource_group_name: str, factory_name: str, run_id: str, ** map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("PipelineRun", pipeline_response) + deserialized = self._deserialize("PipelineRun", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -294,7 +290,6 @@ async def cancel( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_pipelines_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_pipelines_operations.py index 
d5e054e6815..1d7c423bdff 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_pipelines_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_pipelines_operations.py @@ -21,15 +21,13 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models -from ..._vendor import _convert_request from ...operations._pipelines_operations import ( build_create_or_update_request, build_create_run_request, @@ -106,7 +104,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -122,7 +119,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -281,7 +277,6 @@ async def create_or_update( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -295,7 +290,7 @@ async def create_or_update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("PipelineResource", pipeline_response) + deserialized = self._deserialize("PipelineResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -351,7 +346,6 @@ async def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -367,7 +361,7 @@ async def get( deserialized = None if response.status_code == 200: - deserialized = self._deserialize("PipelineResource", pipeline_response) + deserialized = self._deserialize("PipelineResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -413,7 +407,6 @@ async def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -611,7 +604,6 @@ async def create_run( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -625,7 +617,7 @@ async def create_run( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("CreateRunResponse", pipeline_response) + deserialized = self._deserialize("CreateRunResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_private_end_point_connections_operations.py 
b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_private_end_point_connections_operations.py index aaa8d9cb39c..4448a257ebc 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_private_end_point_connections_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_private_end_point_connections_operations.py @@ -20,14 +20,12 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models -from ..._vendor import _convert_request from ...operations._private_end_point_connections_operations import build_list_by_factory_request if sys.version_info >= (3, 9): @@ -98,7 +96,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -114,7 +111,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_private_endpoint_connection_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_private_endpoint_connection_operations.py index d444ea0bf9d..0708068b34b 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_private_endpoint_connection_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_private_endpoint_connection_operations.py @@ -19,14 +19,12 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... 
import models as _models -from ..._vendor import _convert_request from ...operations._private_endpoint_connection_operations import ( build_create_or_update_request, build_delete_request, @@ -195,7 +193,6 @@ async def create_or_update( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -209,7 +206,7 @@ async def create_or_update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("PrivateEndpointConnectionResource", pipeline_response) + deserialized = self._deserialize("PrivateEndpointConnectionResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -265,7 +262,6 @@ async def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -279,7 +275,7 @@ async def get( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("PrivateEndpointConnectionResource", pipeline_response) + deserialized = self._deserialize("PrivateEndpointConnectionResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -325,7 +321,6 @@ async def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_private_link_resources_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_private_link_resources_operations.py index deefa40fd5b..0a374306c49 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_private_link_resources_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_private_link_resources_operations.py @@ -18,14 +18,12 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... 
import models as _models -from ..._vendor import _convert_request from ...operations._private_link_resources_operations import build_get_request if sys.version_info >= (3, 9): @@ -91,7 +89,6 @@ async def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -105,7 +102,7 @@ async def get( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("PrivateLinkResourcesWrapper", pipeline_response) + deserialized = self._deserialize("PrivateLinkResourcesWrapper", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_trigger_runs_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_trigger_runs_operations.py index bf3baef1b99..83963ccb521 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_trigger_runs_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_trigger_runs_operations.py @@ -19,14 +19,12 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models -from ..._vendor import _convert_request from ...operations._trigger_runs_operations import ( build_cancel_request, build_query_by_factory_request, @@ -102,7 +100,6 @@ async def rerun( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -161,7 +158,6 @@ async def cancel( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -285,7 +281,6 @@ async def query_by_factory( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -299,7 +294,7 @@ async def query_by_factory( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("TriggerRunsQueryResponse", pipeline_response) + deserialized = self._deserialize("TriggerRunsQueryResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_triggers_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_triggers_operations.py index 98cc206b7c9..a5f24c34622 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_triggers_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_triggers_operations.py @@ -8,7 +8,7 @@ # -------------------------------------------------------------------------- from io 
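Across the operation modules above the pattern is uniform: the vendored `_convert_request` helper is gone, `HttpRequest`/`AsyncHttpResponse` come from `azure.core.rest` instead of `azure.core.pipeline.transport`, and model deserialization is fed `pipeline_response.http_response` rather than the `PipelineResponse` wrapper. A compact sketch of the resulting non-LRO call shape, with hypothetical names (`client`, `deserializer`, `model_name`) standing in for the generated attributes; it is not a drop-in replacement for any generated method:

# Sketch of the regenerated non-LRO call shape (hypothetical helper).
from typing import Any

from azure.core.exceptions import HttpResponseError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.rest import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat


async def _run_and_deserialize(client, deserializer, request: HttpRequest, model_name: str, error_map, **kwargs: Any):
    # build_*_request output is used directly; no _convert_request step remains.
    request.url = client.format_url(request.url)

    pipeline_response: PipelineResponse = await client._pipeline.run(  # pylint: disable=protected-access
        request, stream=False, **kwargs
    )
    response: AsyncHttpResponse = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    # The deserializer now receives the HTTP response itself, not the PipelineResponse.
    return deserializer(model_name, response)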
import IOBase import sys -from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload +from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList @@ -18,12 +18,13 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict @@ -31,7 +32,6 @@ from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... import models as _models -from ..._vendor import _convert_request from ...operations._triggers_operations import ( build_create_or_update_request, build_delete_request, @@ -111,7 +111,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -127,7 +126,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -263,7 +261,6 @@ async def query_by_factory( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -277,7 +274,7 @@ async def query_by_factory( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("TriggerQueryResponse", pipeline_response) + deserialized = self._deserialize("TriggerQueryResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -414,7 +411,6 @@ async def create_or_update( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -428,7 +424,7 @@ async def create_or_update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("TriggerResource", pipeline_response) + deserialized = self._deserialize("TriggerResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -484,7 +480,6 @@ async def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -500,7 +495,7 @@ async def get( deserialized = None if response.status_code == 200: - deserialized = self._deserialize("TriggerResource", pipeline_response) + deserialized = self._deserialize("TriggerResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -546,7 +541,6 @@ async def delete( # pylint: 
disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -565,7 +559,7 @@ async def delete( # pylint: disable=inconsistent-return-statements async def _subscribe_to_events_initial( self, resource_group_name: str, factory_name: str, trigger_name: str, **kwargs: Any - ) -> Optional[_models.TriggerSubscriptionOperationStatus]: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -578,7 +572,7 @@ async def _subscribe_to_events_initial( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[Optional[_models.TriggerSubscriptionOperationStatus]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) _request = build_subscribe_to_events_request( resource_group_name=resource_group_name, @@ -589,10 +583,10 @@ async def _subscribe_to_events_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -600,12 +594,14 @@ async def _subscribe_to_events_initial( response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize("TriggerSubscriptionOperationStatus", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -649,10 +645,11 @@ async def begin_subscribe_to_events( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("TriggerSubscriptionOperationStatus", pipeline_response) + deserialized = self._deserialize("TriggerSubscriptionOperationStatus", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -713,7 +710,6 @@ async def get_event_subscription_status( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -727,7 +723,7 @@ async def get_event_subscription_status( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("TriggerSubscriptionOperationStatus", pipeline_response) + deserialized = self._deserialize("TriggerSubscriptionOperationStatus", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -736,7 +732,7 @@ async def get_event_subscription_status( async def _unsubscribe_from_events_initial( self, 
resource_group_name: str, factory_name: str, trigger_name: str, **kwargs: Any - ) -> Optional[_models.TriggerSubscriptionOperationStatus]: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -749,7 +745,7 @@ async def _unsubscribe_from_events_initial( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[Optional[_models.TriggerSubscriptionOperationStatus]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) _request = build_unsubscribe_from_events_request( resource_group_name=resource_group_name, @@ -760,10 +756,10 @@ async def _unsubscribe_from_events_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -771,12 +767,14 @@ async def _unsubscribe_from_events_initial( response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize("TriggerSubscriptionOperationStatus", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -820,10 +818,11 @@ async def begin_unsubscribe_from_events( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("TriggerSubscriptionOperationStatus", pipeline_response) + deserialized = self._deserialize("TriggerSubscriptionOperationStatus", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -845,9 +844,9 @@ def get_long_running_output(pipeline_response): self._client, raw_result, get_long_running_output, polling_method # type: ignore ) - async def _start_initial( # pylint: disable=inconsistent-return-statements + async def _start_initial( self, resource_group_name: str, factory_name: str, trigger_name: str, **kwargs: Any - ) -> None: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -860,7 +859,7 @@ async def _start_initial( # pylint: disable=inconsistent-return-statements _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) _request = build_start_request( resource_group_name=resource_group_name, @@ -871,10 +870,10 @@ async def _start_initial( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = 
_convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -882,11 +881,19 @@ async def _start_initial( # pylint: disable=inconsistent-return-statements response = pipeline_response.http_response if response.status_code not in [200]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) # type: ignore + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore @distributed_trace_async async def begin_start( @@ -913,7 +920,7 @@ async def begin_start( lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._start_initial( # type: ignore + raw_result = await self._start_initial( resource_group_name=resource_group_name, factory_name=factory_name, trigger_name=trigger_name, @@ -923,6 +930,7 @@ async def begin_start( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -944,9 +952,9 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- ) return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore - async def _stop_initial( # pylint: disable=inconsistent-return-statements + async def _stop_initial( self, resource_group_name: str, factory_name: str, trigger_name: str, **kwargs: Any - ) -> None: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -959,7 +967,7 @@ async def _stop_initial( # pylint: disable=inconsistent-return-statements _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) _request = build_stop_request( resource_group_name=resource_group_name, @@ -970,10 +978,10 @@ async def _stop_initial( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -981,11 +989,19 @@ async def _stop_initial( # pylint: disable=inconsistent-return-statements response = pipeline_response.http_response if response.status_code not in [200]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, 
response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) # type: ignore + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore @distributed_trace_async async def begin_stop( @@ -1012,7 +1028,7 @@ async def begin_stop( lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._stop_initial( # type: ignore + raw_result = await self._stop_initial( resource_group_name=resource_group_name, factory_name=factory_name, trigger_name=trigger_name, @@ -1022,6 +1038,7 @@ async def begin_stop( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/__init__.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/__init__.py index 53a0c4e851d..5616a16fc7f 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/__init__.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/__init__.py @@ -119,11 +119,13 @@ from ._models_py3 import AzureSqlSource from ._models_py3 import AzureSqlTableDataset from ._models_py3 import AzureStorageLinkedService +from ._models_py3 import AzureStorageLinkedServiceTypeProperties from ._models_py3 import AzureSynapseArtifactsLinkedService from ._models_py3 import AzureTableDataset from ._models_py3 import AzureTableSink from ._models_py3 import AzureTableSource from ._models_py3 import AzureTableStorageLinkedService +from ._models_py3 import AzureTableStorageLinkedServiceTypeProperties from ._models_py3 import BigDataPoolParametrizationReference from ._models_py3 import BinaryDataset from ._models_py3 import BinaryReadSettings @@ -153,6 +155,7 @@ from ._models_py3 import ConcurObjectDataset from ._models_py3 import ConcurSource from ._models_py3 import ConnectionStateProperties +from ._models_py3 import ContinuationSettingsReference from ._models_py3 import ControlActivity from ._models_py3 import CopyActivity from ._models_py3 import CopyActivityLogSettings @@ -1037,11 +1040,13 @@ "AzureSqlSource", "AzureSqlTableDataset", "AzureStorageLinkedService", + "AzureStorageLinkedServiceTypeProperties", "AzureSynapseArtifactsLinkedService", "AzureTableDataset", "AzureTableSink", "AzureTableSource", "AzureTableStorageLinkedService", + "AzureTableStorageLinkedServiceTypeProperties", "BigDataPoolParametrizationReference", "BinaryDataset", "BinaryReadSettings", @@ -1071,6 +1076,7 @@ "ConcurObjectDataset", "ConcurSource", "ConnectionStateProperties", + "ContinuationSettingsReference", "ControlActivity", "CopyActivity", "CopyActivityLogSettings", diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_data_factory_management_client_enums.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_data_factory_management_client_enums.py index 5f07e7eab04..bc478c060a6 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_data_factory_management_client_enums.py +++ 
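The models/__init__.py hunk above adds three newly generated types to the public model surface. A quick sketch showing they are importable and listed in `__all__`; the import path assumes the vendored SDK namespace used by this extension:

# Sketch: new model types exported by the regenerated models package.
from azext_datafactory.vendored_sdks.datafactory import models as _models

for name in (
    "AzureStorageLinkedServiceTypeProperties",
    "AzureTableStorageLinkedServiceTypeProperties",
    "ContinuationSettingsReference",
):
    assert name in _models.__all__
    assert hasattr(_models, name)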
b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_data_factory_management_client_enums.py @@ -274,6 +274,7 @@ class DynamicsAuthenticationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): OFFICE365 = "Office365" IFD = "Ifd" AAD_SERVICE_PRINCIPAL = "AADServicePrincipal" + ACTIVE_DIRECTORY = "Active Directory" class DynamicsDeploymentType(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -343,10 +344,11 @@ class GlobalParameterType(str, Enum, metaclass=CaseInsensitiveEnumMeta): OBJECT = "Object" STRING = "String" - INT_ENUM = "Int" + INT = "Int" FLOAT = "Float" BOOL = "Bool" ARRAY = "Array" + INT_ENUM = "Int" class GoogleAdWordsAuthenticationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -588,9 +590,10 @@ class NotebookParameterType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Notebook parameter type.""" STRING = "string" - INT_ENUM = "int" + INT = "int" FLOAT = "float" BOOL = "bool" + INT_ENUM = "int" class NotebookReferenceType(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -638,11 +641,12 @@ class ParameterType(str, Enum, metaclass=CaseInsensitiveEnumMeta): OBJECT = "Object" STRING = "String" - INT_ENUM = "Int" + INT = "Int" FLOAT = "Float" BOOL = "Bool" ARRAY = "Array" SECURE_STRING = "SecureString" + INT_ENUM = "Int" class PhoenixAuthenticationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -728,8 +732,9 @@ class RunQueryFilterOperator(str, Enum, metaclass=CaseInsensitiveEnumMeta): EQUALS = "Equals" NOT_EQUALS = "NotEquals" - IN_ENUM = "In" + IN = "In" NOT_IN = "NotIn" + IN_ENUM = "In" class RunQueryOrder(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -964,6 +969,7 @@ class SqlServerAuthenticationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): SQL = "SQL" WINDOWS = "Windows" + USER_ASSIGNED_MANAGED_IDENTITY = "UserAssignedManagedIdentity" class SqlWriteBehaviorEnum(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -1002,12 +1008,13 @@ class StoredProcedureParameterType(str, Enum, metaclass=CaseInsensitiveEnumMeta) """Stored procedure parameter type.""" STRING = "String" - INT_ENUM = "Int" + INT = "Int" INT64 = "Int64" DECIMAL = "Decimal" GUID = "Guid" BOOLEAN = "Boolean" DATE = "Date" + INT_ENUM = "Int" class SybaseAuthenticationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models_py3.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models_py3.py index c0b5f7a60d6..6415cfc305b 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models_py3.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models_py3.py @@ -511,6 +511,8 @@ class LinkedService(_serialization.Model): :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. 
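The enum hunk above renames the awkward `*_ENUM` members (`INT_ENUM`, `IN_ENUM`) to their natural spellings while re-adding the old names as trailing aliases with the same string value, and introduces new members such as `DynamicsAuthenticationType.ACTIVE_DIRECTORY` and `SqlServerAuthenticationType.USER_ASSIGNED_MANAGED_IDENTITY`. Because Python treats a second enum member with an identical value as an alias of the first, existing callers that reference the old names keep working. A standalone sketch of the same shape (not the vendored enum itself):

# Standalone illustration of the alias pattern used by the regenerated enums.
from enum import Enum

from azure.core import CaseInsensitiveEnumMeta


class ParameterType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    OBJECT = "Object"
    STRING = "String"
    INT = "Int"
    FLOAT = "Float"
    BOOL = "Bool"
    ARRAY = "Array"
    SECURE_STRING = "SecureString"
    INT_ENUM = "Int"  # retained alias; same value as INT


assert ParameterType.INT_ENUM is ParameterType.INT   # the alias resolves to the same member
assert ParameterType["int"] is ParameterType.INT      # case-insensitive name lookup still works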
@@ -528,6 +530,7 @@ class LinkedService(_serialization.Model): _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -664,6 +667,7 @@ def __init__( self, *, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -674,6 +678,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -686,6 +692,7 @@ def __init__( super().__init__(**kwargs) self.additional_properties = additional_properties self.type: Optional[str] = None + self.version = version self.connect_via = connect_via self.description = description self.parameters = parameters @@ -702,6 +709,8 @@ class AmazonMWSLinkedService(LinkedService): # pylint: disable=too-many-instanc :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -750,6 +759,7 @@ class AmazonMWSLinkedService(LinkedService): # pylint: disable=too-many-instanc _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -774,6 +784,7 @@ def __init__( seller_id: JSON, access_key_id: JSON, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -790,6 +801,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -829,6 +842,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -1603,6 +1617,8 @@ class AmazonRdsForOracleLinkedService(LinkedService): :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. 
Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -1629,6 +1645,7 @@ class AmazonRdsForOracleLinkedService(LinkedService): _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -1643,6 +1660,7 @@ def __init__( *, connection_string: JSON, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -1655,6 +1673,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -1674,6 +1694,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -1986,6 +2007,8 @@ class AmazonRdsForSqlServerLinkedService(LinkedService): # pylint: disable=too- :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -2099,6 +2122,7 @@ class AmazonRdsForSqlServerLinkedService(LinkedService): # pylint: disable=too- _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -2137,6 +2161,7 @@ def __init__( # pylint: disable=too-many-locals self, *, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -2172,6 +2197,8 @@ def __init__( # pylint: disable=too-many-locals :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. 
@@ -2280,6 +2307,7 @@ def __init__( # pylint: disable=too-many-locals """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -3115,6 +3143,8 @@ class AmazonRedshiftLinkedService(LinkedService): # pylint: disable=too-many-in :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -3151,6 +3181,7 @@ class AmazonRedshiftLinkedService(LinkedService): # pylint: disable=too-many-in _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -3169,6 +3200,7 @@ def __init__( server: JSON, database: JSON, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -3183,6 +3215,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -3211,6 +3245,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -3469,6 +3504,8 @@ class AmazonS3CompatibleLinkedService(LinkedService): # pylint: disable=too-man :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -3503,6 +3540,7 @@ class AmazonS3CompatibleLinkedService(LinkedService): # pylint: disable=too-man _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -3518,6 +3556,7 @@ def __init__( self, *, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -3533,6 +3572,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. 
:paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -3561,6 +3602,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -4130,6 +4172,8 @@ class AmazonS3LinkedService(LinkedService): # pylint: disable=too-many-instance :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -4165,6 +4209,7 @@ class AmazonS3LinkedService(LinkedService): # pylint: disable=too-many-instance _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -4181,6 +4226,7 @@ def __init__( self, *, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -4197,6 +4243,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -4226,6 +4274,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -4686,6 +4735,8 @@ class AppFiguresLinkedService(LinkedService): :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. 
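The hunks above apply the same change to every LinkedService subclass: a new optional ``version`` property documented in the class docstring, a ``"version": {"key": "version", "type": "str"}`` entry in ``_attribute_map``, a keyword-only ``version`` constructor parameter, and a pass-through to ``super().__init__``. A minimal usage sketch, assuming the vendored models package path shipped with this extension (``azext_datafactory.vendored_sdks.datafactory.models``) and a placeholder version string:

# Illustrative sketch; the import path and the values are assumptions, not part of the diff.
from azext_datafactory.vendored_sdks.datafactory import models

ls = models.AmazonS3LinkedService(
    version="1.0",  # new optional keyword added in this release
    description="S3 source pinned to linked service version 1.0",
)
assert ls.version == "1.0"
# The attribute map entry {"key": "version", "type": "str"} serializes it at the top level.
assert ls.serialize()["version"] == "1.0"

The same keyword is accepted by every linked service model touched in this diff.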
@@ -4713,6 +4764,7 @@ class AppFiguresLinkedService(LinkedService): _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -4729,6 +4781,7 @@ def __init__( password: "_models.SecretBase", client_key: "_models.SecretBase", additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -4739,6 +4792,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -4757,6 +4812,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -4802,6 +4858,8 @@ class AsanaLinkedService(LinkedService): :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -4825,6 +4883,7 @@ class AsanaLinkedService(LinkedService): _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -4838,6 +4897,7 @@ def __init__( *, api_token: "_models.SecretBase", additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -4849,6 +4909,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -4865,6 +4927,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -5650,6 +5713,8 @@ class AzureBatchLinkedService(LinkedService): # pylint: disable=too-many-instan :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. 
:vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -5689,6 +5754,7 @@ class AzureBatchLinkedService(LinkedService): # pylint: disable=too-many-instan _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -5710,6 +5776,7 @@ def __init__( pool_name: JSON, linked_service_name: "_models.LinkedServiceReference", additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -5723,6 +5790,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -5752,6 +5821,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -6059,6 +6129,8 @@ class AzureBlobFSLinkedService(LinkedService): # pylint: disable=too-many-insta :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -6115,6 +6187,7 @@ class AzureBlobFSLinkedService(LinkedService): # pylint: disable=too-many-insta _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -6137,6 +6210,7 @@ def __init__( self, *, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -6159,6 +6233,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. 
@@ -6209,6 +6285,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -6826,6 +6903,8 @@ class AzureBlobStorageLinkedService(LinkedService): # pylint: disable=too-many- :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -6886,6 +6965,7 @@ class AzureBlobStorageLinkedService(LinkedService): # pylint: disable=too-many- _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -6910,6 +6990,7 @@ def __init__( self, *, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -6934,6 +7015,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -6988,6 +7071,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -7618,6 +7702,8 @@ class AzureDatabricksDeltaLakeLinkedService(LinkedService): # pylint: disable=t :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -7654,6 +7740,7 @@ class AzureDatabricksDeltaLakeLinkedService(LinkedService): # pylint: disable=t _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -7671,6 +7758,7 @@ def __init__( *, domain: JSON, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -7686,6 +7774,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. 
:paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -7715,6 +7805,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -7942,6 +8033,8 @@ class AzureDatabricksLinkedService(LinkedService): # pylint: disable=too-many-i :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -8026,6 +8119,7 @@ class AzureDatabricksLinkedService(LinkedService): # pylint: disable=too-many-i _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -8056,6 +8150,7 @@ def __init__( # pylint: disable=too-many-locals *, domain: JSON, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -8084,6 +8179,8 @@ def __init__( # pylint: disable=too-many-locals :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -8161,6 +8258,7 @@ def __init__( # pylint: disable=too-many-locals """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -8463,6 +8561,8 @@ class AzureDataExplorerLinkedService(LinkedService): # pylint: disable=too-many :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. 
@@ -8500,6 +8600,7 @@ class AzureDataExplorerLinkedService(LinkedService): # pylint: disable=too-many _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -8518,6 +8619,7 @@ def __init__( endpoint: JSON, database: JSON, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -8532,6 +8634,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -8561,6 +8665,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -8922,6 +9027,8 @@ class AzureDataLakeAnalyticsLinkedService(LinkedService): # pylint: disable=too :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -8965,6 +9072,7 @@ class AzureDataLakeAnalyticsLinkedService(LinkedService): # pylint: disable=too _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -8985,6 +9093,7 @@ def __init__( account_name: JSON, tenant: JSON, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -9001,6 +9110,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. 
@@ -9036,6 +9147,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -9192,6 +9304,8 @@ class AzureDataLakeStoreLinkedService(LinkedService): # pylint: disable=too-man :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -9240,6 +9354,7 @@ class AzureDataLakeStoreLinkedService(LinkedService): # pylint: disable=too-man _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -9261,6 +9376,7 @@ def __init__( *, data_lake_store_uri: JSON, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -9280,6 +9396,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -9321,6 +9439,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -9843,6 +9962,8 @@ class AzureFileStorageLinkedService(LinkedService): # pylint: disable=too-many- :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -9877,6 +9998,11 @@ class AzureFileStorageLinkedService(LinkedService): # pylint: disable=too-many- :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. :vartype encrypted_credential: str + :ivar service_endpoint: File service endpoint of the Azure File Storage resource. It is + mutually exclusive with connectionString, sasUri property. + :vartype service_endpoint: JSON + :ivar credential: The credential reference containing authentication information. 
+ :vartype credential: ~azure.mgmt.datafactory.models.CredentialReference """ _validation = { @@ -9886,6 +10012,7 @@ class AzureFileStorageLinkedService(LinkedService): # pylint: disable=too-many- _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -9900,12 +10027,15 @@ class AzureFileStorageLinkedService(LinkedService): # pylint: disable=too-many- "file_share": {"key": "typeProperties.fileShare", "type": "object"}, "snapshot": {"key": "typeProperties.snapshot", "type": "object"}, "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, + "service_endpoint": {"key": "typeProperties.serviceEndpoint", "type": "object"}, + "credential": {"key": "typeProperties.credential", "type": "CredentialReference"}, } def __init__( self, *, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -9920,12 +10050,16 @@ def __init__( file_share: Optional[JSON] = None, snapshot: Optional[JSON] = None, encrypted_credential: Optional[str] = None, + service_endpoint: Optional[JSON] = None, + credential: Optional["_models.CredentialReference"] = None, **kwargs: Any ) -> None: """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -9960,9 +10094,15 @@ def __init__( :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. :paramtype encrypted_credential: str + :keyword service_endpoint: File service endpoint of the Azure File Storage resource. It is + mutually exclusive with connectionString, sasUri property. + :paramtype service_endpoint: JSON + :keyword credential: The credential reference containing authentication information. + :paramtype credential: ~azure.mgmt.datafactory.models.CredentialReference """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -9980,6 +10120,8 @@ def __init__( self.file_share = file_share self.snapshot = snapshot self.encrypted_credential = encrypted_credential + self.service_endpoint = service_endpoint + self.credential = credential class AzureFileStorageLocation(DatasetLocation): @@ -10410,6 +10552,8 @@ class AzureFunctionLinkedService(LinkedService): # pylint: disable=too-many-ins :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. 
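Besides ``version``, AzureFileStorageLinkedService gains two new type properties: ``serviceEndpoint`` (mutually exclusive with ``connectionString``/``sasUri``) and ``credential``, a CredentialReference. A hedged sketch of how a caller might set them; the endpoint and credential name are placeholders, and CredentialReference is assumed to take ``reference_name`` as in the public azure-mgmt-datafactory models:

# Illustrative sketch; placeholder values, import path assumed.
from azext_datafactory.vendored_sdks.datafactory import models

file_ls = models.AzureFileStorageLinkedService(
    version="1.0",
    service_endpoint="https://<account>.file.core.windows.net/",  # typeProperties.serviceEndpoint
    credential=models.CredentialReference(reference_name="<credential-name>"),
)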
@@ -10445,6 +10589,7 @@ class AzureFunctionLinkedService(LinkedService): # pylint: disable=too-many-ins _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -10462,6 +10607,7 @@ def __init__( *, function_app_url: JSON, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -10477,6 +10623,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -10505,6 +10653,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -10530,6 +10679,8 @@ class AzureKeyVaultLinkedService(LinkedService): :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -10553,6 +10704,7 @@ class AzureKeyVaultLinkedService(LinkedService): _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -10566,6 +10718,7 @@ def __init__( *, base_url: JSON, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -10577,6 +10730,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -10593,6 +10748,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -10697,6 +10853,8 @@ class AzureMariaDBLinkedService(LinkedService): :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. 
:vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -10722,6 +10880,7 @@ class AzureMariaDBLinkedService(LinkedService): _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -10735,6 +10894,7 @@ def __init__( self, *, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -10748,6 +10908,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -10767,6 +10929,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -11320,6 +11483,8 @@ class AzureMLLinkedService(LinkedService): # pylint: disable=too-many-instance- :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -11363,6 +11528,7 @@ class AzureMLLinkedService(LinkedService): # pylint: disable=too-many-instance- _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -11383,6 +11549,7 @@ def __init__( ml_endpoint: JSON, api_key: "_models.SecretBase", additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -11399,6 +11566,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. 
@@ -11434,6 +11603,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -11461,6 +11631,8 @@ class AzureMLServiceLinkedService(LinkedService): # pylint: disable=too-many-in :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -11506,6 +11678,7 @@ class AzureMLServiceLinkedService(LinkedService): # pylint: disable=too-many-in _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -11527,6 +11700,7 @@ def __init__( resource_group_name: JSON, ml_workspace_name: JSON, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -11542,6 +11716,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -11578,6 +11754,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -11783,6 +11960,8 @@ class AzureMySqlLinkedService(LinkedService): :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -11809,6 +11988,7 @@ class AzureMySqlLinkedService(LinkedService): _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -11823,6 +12003,7 @@ def __init__( *, connection_string: JSON, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -11835,6 +12016,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. 
:paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -11854,6 +12037,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -12191,6 +12375,8 @@ class AzurePostgreSqlLinkedService(LinkedService): :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -12216,6 +12402,7 @@ class AzurePostgreSqlLinkedService(LinkedService): _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -12229,6 +12416,7 @@ def __init__( self, *, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -12242,6 +12430,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -12261,6 +12451,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -12911,6 +13102,8 @@ class AzureSearchLinkedService(LinkedService): :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. 
@@ -12937,6 +13130,7 @@ class AzureSearchLinkedService(LinkedService): _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -12951,6 +13145,7 @@ def __init__( *, url: JSON, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -12963,6 +13158,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -12982,6 +13179,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -13004,6 +13202,8 @@ class AzureSqlDatabaseLinkedService(LinkedService): # pylint: disable=too-many- :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -13142,6 +13342,7 @@ class AzureSqlDatabaseLinkedService(LinkedService): # pylint: disable=too-many- _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -13187,6 +13388,7 @@ def __init__( # pylint: disable=too-many-locals self, *, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -13229,6 +13431,8 @@ def __init__( # pylint: disable=too-many-locals :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. 
@@ -13362,6 +13566,7 @@ def __init__( # pylint: disable=too-many-locals """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -13774,6 +13979,8 @@ class AzureSqlDWLinkedService(LinkedService): # pylint: disable=too-many-instan :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -13910,6 +14117,7 @@ class AzureSqlDWLinkedService(LinkedService): # pylint: disable=too-many-instan _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -13951,6 +14159,7 @@ def __init__( # pylint: disable=too-many-locals self, *, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -13992,6 +14201,8 @@ def __init__( # pylint: disable=too-many-locals :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -14122,6 +14333,7 @@ def __init__( # pylint: disable=too-many-locals """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -14647,6 +14859,8 @@ class AzureSqlMILinkedService(LinkedService): # pylint: disable=too-many-instan :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. 
@@ -14785,6 +14999,7 @@ class AzureSqlMILinkedService(LinkedService): # pylint: disable=too-many-instan _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -14830,6 +15045,7 @@ def __init__( # pylint: disable=too-many-locals self, *, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -14872,6 +15088,8 @@ def __init__( # pylint: disable=too-many-locals :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -15005,6 +15223,7 @@ def __init__( # pylint: disable=too-many-locals """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -15991,6 +16210,8 @@ class AzureStorageLinkedService(LinkedService): # pylint: disable=too-many-inst :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -16021,6 +16242,7 @@ class AzureStorageLinkedService(LinkedService): # pylint: disable=too-many-inst _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -16036,6 +16258,7 @@ def __init__( self, *, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -16051,6 +16274,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. 
@@ -16075,6 +16300,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -16089,6 +16315,65 @@ def __init__( self.encrypted_credential = encrypted_credential +class AzureStorageLinkedServiceTypeProperties(_serialization.Model): + """Azure Storage linked service properties. + + :ivar connection_string: The connection string. It is mutually exclusive with sasUri property. + Type: string, SecureString or AzureKeyVaultSecretReference. + :vartype connection_string: JSON + :ivar account_key: The Azure key vault secret reference of accountKey in connection string. + :vartype account_key: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :ivar sas_uri: SAS URI of the Azure Storage resource. It is mutually exclusive with + connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. + :vartype sas_uri: JSON + :ivar sas_token: The Azure key vault secret reference of sasToken in sas uri. + :vartype sas_token: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string. + :vartype encrypted_credential: str + """ + + _attribute_map = { + "connection_string": {"key": "connectionString", "type": "object"}, + "account_key": {"key": "accountKey", "type": "AzureKeyVaultSecretReference"}, + "sas_uri": {"key": "sasUri", "type": "object"}, + "sas_token": {"key": "sasToken", "type": "AzureKeyVaultSecretReference"}, + "encrypted_credential": {"key": "encryptedCredential", "type": "str"}, + } + + def __init__( + self, + *, + connection_string: Optional[JSON] = None, + account_key: Optional["_models.AzureKeyVaultSecretReference"] = None, + sas_uri: Optional[JSON] = None, + sas_token: Optional["_models.AzureKeyVaultSecretReference"] = None, + encrypted_credential: Optional[str] = None, + **kwargs: Any + ) -> None: + """ + :keyword connection_string: The connection string. It is mutually exclusive with sasUri + property. Type: string, SecureString or AzureKeyVaultSecretReference. + :paramtype connection_string: JSON + :keyword account_key: The Azure key vault secret reference of accountKey in connection string. + :paramtype account_key: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :keyword sas_uri: SAS URI of the Azure Storage resource. It is mutually exclusive with + connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. + :paramtype sas_uri: JSON + :keyword sas_token: The Azure key vault secret reference of sasToken in sas uri. + :paramtype sas_token: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str + """ + super().__init__(**kwargs) + self.connection_string = connection_string + self.account_key = account_key + self.sas_uri = sas_uri + self.sas_token = sas_token + self.encrypted_credential = encrypted_credential + + class AzureSynapseArtifactsLinkedService(LinkedService): """Azure Synapse Analytics (Artifacts) linked service. @@ -16099,6 +16384,8 @@ class AzureSynapseArtifactsLinkedService(LinkedService): :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. 
:vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -16127,6 +16414,7 @@ class AzureSynapseArtifactsLinkedService(LinkedService): _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -16141,6 +16429,7 @@ def __init__( *, endpoint: JSON, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -16153,6 +16442,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -16174,6 +16465,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -16541,6 +16833,8 @@ class AzureTableStorageLinkedService(LinkedService): # pylint: disable=too-many :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -16562,6 +16856,11 @@ class AzureTableStorageLinkedService(LinkedService): # pylint: disable=too-many :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. :vartype encrypted_credential: str + :ivar service_endpoint: Table service endpoint of the Azure Table Storage resource. It is + mutually exclusive with connectionString, sasUri property. + :vartype service_endpoint: JSON + :ivar credential: The credential reference containing authentication information. 
+ :vartype credential: ~azure.mgmt.datafactory.models.CredentialReference """ _validation = { @@ -16571,6 +16870,7 @@ class AzureTableStorageLinkedService(LinkedService): # pylint: disable=too-many _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -16580,12 +16880,15 @@ class AzureTableStorageLinkedService(LinkedService): # pylint: disable=too-many "sas_uri": {"key": "typeProperties.sasUri", "type": "object"}, "sas_token": {"key": "typeProperties.sasToken", "type": "AzureKeyVaultSecretReference"}, "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, + "service_endpoint": {"key": "typeProperties.serviceEndpoint", "type": "object"}, + "credential": {"key": "typeProperties.credential", "type": "CredentialReference"}, } def __init__( self, *, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -16595,12 +16898,16 @@ def __init__( sas_uri: Optional[JSON] = None, sas_token: Optional["_models.AzureKeyVaultSecretReference"] = None, encrypted_credential: Optional[str] = None, + service_endpoint: Optional[JSON] = None, + credential: Optional["_models.CredentialReference"] = None, **kwargs: Any ) -> None: """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -16622,9 +16929,15 @@ def __init__( :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. :paramtype encrypted_credential: str + :keyword service_endpoint: Table service endpoint of the Azure Table Storage resource. It is + mutually exclusive with connectionString, sasUri property. + :paramtype service_endpoint: JSON + :keyword credential: The credential reference containing authentication information. + :paramtype credential: ~azure.mgmt.datafactory.models.CredentialReference """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -16637,6 +16950,87 @@ def __init__( self.sas_uri = sas_uri self.sas_token = sas_token self.encrypted_credential = encrypted_credential + self.service_endpoint = service_endpoint + self.credential = credential + + +class AzureTableStorageLinkedServiceTypeProperties( + AzureStorageLinkedServiceTypeProperties +): # pylint: disable=name-too-long + """Azure Table Storage linked service properties. + + :ivar connection_string: The connection string. It is mutually exclusive with sasUri property. + Type: string, SecureString or AzureKeyVaultSecretReference. 
+ :vartype connection_string: JSON + :ivar account_key: The Azure key vault secret reference of accountKey in connection string. + :vartype account_key: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :ivar sas_uri: SAS URI of the Azure Storage resource. It is mutually exclusive with + connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. + :vartype sas_uri: JSON + :ivar sas_token: The Azure key vault secret reference of sasToken in sas uri. + :vartype sas_token: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string. + :vartype encrypted_credential: str + :ivar service_endpoint: Table service endpoint of the Azure Table Storage resource. It is + mutually exclusive with connectionString, sasUri property. + :vartype service_endpoint: JSON + :ivar credential: The credential reference containing authentication information. + :vartype credential: ~azure.mgmt.datafactory.models.CredentialReference + """ + + _attribute_map = { + "connection_string": {"key": "connectionString", "type": "object"}, + "account_key": {"key": "accountKey", "type": "AzureKeyVaultSecretReference"}, + "sas_uri": {"key": "sasUri", "type": "object"}, + "sas_token": {"key": "sasToken", "type": "AzureKeyVaultSecretReference"}, + "encrypted_credential": {"key": "encryptedCredential", "type": "str"}, + "service_endpoint": {"key": "serviceEndpoint", "type": "object"}, + "credential": {"key": "credential", "type": "CredentialReference"}, + } + + def __init__( + self, + *, + connection_string: Optional[JSON] = None, + account_key: Optional["_models.AzureKeyVaultSecretReference"] = None, + sas_uri: Optional[JSON] = None, + sas_token: Optional["_models.AzureKeyVaultSecretReference"] = None, + encrypted_credential: Optional[str] = None, + service_endpoint: Optional[JSON] = None, + credential: Optional["_models.CredentialReference"] = None, + **kwargs: Any + ) -> None: + """ + :keyword connection_string: The connection string. It is mutually exclusive with sasUri + property. Type: string, SecureString or AzureKeyVaultSecretReference. + :paramtype connection_string: JSON + :keyword account_key: The Azure key vault secret reference of accountKey in connection string. + :paramtype account_key: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :keyword sas_uri: SAS URI of the Azure Storage resource. It is mutually exclusive with + connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. + :paramtype sas_uri: JSON + :keyword sas_token: The Azure key vault secret reference of sasToken in sas uri. + :paramtype sas_token: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str + :keyword service_endpoint: Table service endpoint of the Azure Table Storage resource. It is + mutually exclusive with connectionString, sasUri property. + :paramtype service_endpoint: JSON + :keyword credential: The credential reference containing authentication information. 
+ :paramtype credential: ~azure.mgmt.datafactory.models.CredentialReference + """ + super().__init__( + connection_string=connection_string, + account_key=account_key, + sas_uri=sas_uri, + sas_token=sas_token, + encrypted_credential=encrypted_credential, + **kwargs + ) + self.service_endpoint = service_endpoint + self.credential = credential class BigDataPoolParametrizationReference(_serialization.Model): @@ -17666,6 +18060,8 @@ class CassandraLinkedService(LinkedService): # pylint: disable=too-many-instanc :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -17700,6 +18096,7 @@ class CassandraLinkedService(LinkedService): # pylint: disable=too-many-instanc _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -17717,6 +18114,7 @@ def __init__( *, host: JSON, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -17732,6 +18130,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -17760,6 +18160,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -18537,6 +18938,8 @@ class CommonDataServiceForAppsLinkedService(LinkedService): # pylint: disable=t :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -18569,9 +18972,13 @@ class CommonDataServiceForAppsLinkedService(LinkedService): # pylint: disable=t :vartype organization_name: JSON :ivar authentication_type: The authentication type to connect to Common Data Service for Apps server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario. - 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or - Expression with resultType string). Required. + 'AADServicePrincipal' for Server-To-Server authentication in online scenario, 'Active + Directory' for Dynamics on-premises with IFD. Type: string (or Expression with resultType + string). Required. 
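# ---------------------------------------------------------------------------
# [Editor's illustrative sketch -- not part of the diff above]
# AzureTableStorageLinkedService (and the new
# AzureTableStorageLinkedServiceTypeProperties helper) gains serviceEndpoint
# and credential type properties. A minimal sketch under the same vendored
# import assumption; the account name and credential name are hypothetical,
# and CredentialReference comes from the unchanged part of the models module.
from azext_datafactory.vendored_sdks.datafactory import models as _models

table_ls = _models.AzureTableStorageLinkedService(
    version="1.1",  # new optional linked service version
    # new: table service endpoint, mutually exclusive with connectionString/sasUri
    service_endpoint="https://mystorageaccount.table.core.windows.net",
    # new: credential reference, e.g. a user-assigned managed identity credential
    credential=_models.CredentialReference(
        type="CredentialReference",
        reference_name="myManagedIdentityCredential",  # hypothetical credential name
    ),
)
# ---------------------------------------------------------------------------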
:vartype authentication_type: JSON + :ivar domain: The Active Directory domain that will verify user credentials. Type: string (or + Expression with resultType string). + :vartype domain: JSON :ivar username: User name to access the Common Data Service for Apps instance. Type: string (or Expression with resultType string). :vartype username: JSON @@ -18604,6 +19011,7 @@ class CommonDataServiceForAppsLinkedService(LinkedService): # pylint: disable=t _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -18614,6 +19022,7 @@ class CommonDataServiceForAppsLinkedService(LinkedService): # pylint: disable=t "service_uri": {"key": "typeProperties.serviceUri", "type": "object"}, "organization_name": {"key": "typeProperties.organizationName", "type": "object"}, "authentication_type": {"key": "typeProperties.authenticationType", "type": "object"}, + "domain": {"key": "typeProperties.domain", "type": "object"}, "username": {"key": "typeProperties.username", "type": "object"}, "password": {"key": "typeProperties.password", "type": "SecretBase"}, "service_principal_id": {"key": "typeProperties.servicePrincipalId", "type": "object"}, @@ -18628,6 +19037,7 @@ def __init__( deployment_type: JSON, authentication_type: JSON, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -18636,6 +19046,7 @@ def __init__( port: Optional[JSON] = None, service_uri: Optional[JSON] = None, organization_name: Optional[JSON] = None, + domain: Optional[JSON] = None, username: Optional[JSON] = None, password: Optional["_models.SecretBase"] = None, service_principal_id: Optional[JSON] = None, @@ -18648,6 +19059,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -18680,9 +19093,13 @@ def __init__( :paramtype organization_name: JSON :keyword authentication_type: The authentication type to connect to Common Data Service for Apps server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario. - 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or - Expression with resultType string). Required. + 'AADServicePrincipal' for Server-To-Server authentication in online scenario, 'Active + Directory' for Dynamics on-premises with IFD. Type: string (or Expression with resultType + string). Required. :paramtype authentication_type: JSON + :keyword domain: The Active Directory domain that will verify user credentials. Type: string + (or Expression with resultType string). + :paramtype domain: JSON :keyword username: User name to access the Common Data Service for Apps instance. Type: string (or Expression with resultType string). 
:paramtype username: JSON @@ -18707,6 +19124,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -18720,6 +19138,7 @@ def __init__( self.service_uri = service_uri self.organization_name = organization_name self.authentication_type = authentication_type + self.domain = domain self.username = username self.password = password self.service_principal_id = service_principal_id @@ -19034,6 +19453,8 @@ class ConcurLinkedService(LinkedService): # pylint: disable=too-many-instance-a :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -19076,6 +19497,7 @@ class ConcurLinkedService(LinkedService): # pylint: disable=too-many-instance-a _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -19096,6 +19518,7 @@ def __init__( client_id: JSON, username: JSON, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -19112,6 +19535,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -19146,6 +19571,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -19400,6 +19826,45 @@ def __init__(self, **kwargs: Any) -> None: self.status = None +class ContinuationSettingsReference(_serialization.Model): + """Continuation settings for execute data flow activity. + + :ivar continuation_ttl_in_minutes: Continuation TTL in minutes. + :vartype continuation_ttl_in_minutes: JSON + :ivar idle_condition: Idle condition. + :vartype idle_condition: JSON + :ivar customized_checkpoint_key: Customized checkpoint key. + :vartype customized_checkpoint_key: JSON + """ + + _attribute_map = { + "continuation_ttl_in_minutes": {"key": "continuationTtlInMinutes", "type": "object"}, + "idle_condition": {"key": "idleCondition", "type": "object"}, + "customized_checkpoint_key": {"key": "customizedCheckpointKey", "type": "object"}, + } + + def __init__( + self, + *, + continuation_ttl_in_minutes: Optional[JSON] = None, + idle_condition: Optional[JSON] = None, + customized_checkpoint_key: Optional[JSON] = None, + **kwargs: Any + ) -> None: + """ + :keyword continuation_ttl_in_minutes: Continuation TTL in minutes. 
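# ---------------------------------------------------------------------------
# [Editor's illustrative sketch -- not part of the diff above]
# CommonDataServiceForAppsLinkedService now documents the 'Active Directory'
# authentication type and adds a ``domain`` type property for on-premises IFD
# deployments. A sketch using only parameters visible in the hunks above;
# SecureString is an existing SecretBase implementation from the same models
# module, and all organization and account values are hypothetical.
from azext_datafactory.vendored_sdks.datafactory import models as _models

cds_ls = _models.CommonDataServiceForAppsLinkedService(
    deployment_type="OnPremisesWithIfd",
    authentication_type="Active Directory",  # newly documented auth type
    organization_name="contoso",             # hypothetical organization
    domain="CONTOSO",                        # new: AD domain that verifies user credentials
    username="crm-service-user",
    password=_models.SecureString(value="<secret placeholder>"),
)
# ---------------------------------------------------------------------------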
+ :paramtype continuation_ttl_in_minutes: JSON + :keyword idle_condition: Idle condition. + :paramtype idle_condition: JSON + :keyword customized_checkpoint_key: Customized checkpoint key. + :paramtype customized_checkpoint_key: JSON + """ + super().__init__(**kwargs) + self.continuation_ttl_in_minutes = continuation_ttl_in_minutes + self.idle_condition = idle_condition + self.customized_checkpoint_key = customized_checkpoint_key + + class CopyActivity(ExecutionActivity): # pylint: disable=too-many-instance-attributes """Copy activity. @@ -19773,6 +20238,8 @@ class CosmosDbLinkedService(LinkedService): # pylint: disable=too-many-instance :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -19829,6 +20296,7 @@ class CosmosDbLinkedService(LinkedService): # pylint: disable=too-many-instance _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -19851,6 +20319,7 @@ def __init__( self, *, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -19873,6 +20342,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -19924,6 +20395,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -20060,6 +20532,8 @@ class CosmosDbMongoDbApiLinkedService(LinkedService): :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. 
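# ---------------------------------------------------------------------------
# [Editor's illustrative sketch -- not part of the diff above]
# The new ContinuationSettingsReference model defined above is a plain bag of
# three optional expression-valued fields. Constructing one on its own:
from azext_datafactory.vendored_sdks.datafactory import models as _models

continuation = _models.ContinuationSettingsReference(
    continuation_ttl_in_minutes=60,                  # keep the session alive for 60 minutes
    customized_checkpoint_key="orders-incremental",  # hypothetical checkpoint key
)
# It is consumed by the execute-data-flow activities further down in this
# diff; see the ExecuteDataFlowActivity sketch after those hunks.
# ---------------------------------------------------------------------------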
@@ -20089,6 +20563,7 @@ class CosmosDbMongoDbApiLinkedService(LinkedService): _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -20104,6 +20579,7 @@ def __init__( connection_string: JSON, database: JSON, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -20115,6 +20591,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -20136,6 +20614,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -20711,6 +21190,8 @@ class CouchbaseLinkedService(LinkedService): :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -20736,6 +21217,7 @@ class CouchbaseLinkedService(LinkedService): _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -20749,6 +21231,7 @@ def __init__( self, *, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -20762,6 +21245,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -20781,6 +21266,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -21645,6 +22131,8 @@ class CustomDataSourceLinkedService(LinkedService): :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. 
+ :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -21665,6 +22153,7 @@ class CustomDataSourceLinkedService(LinkedService): _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -21677,6 +22166,7 @@ def __init__( *, type_properties: JSON, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -21687,6 +22177,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -21700,6 +22192,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -23594,6 +24087,8 @@ class DataworldLinkedService(LinkedService): :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -23617,6 +24112,7 @@ class DataworldLinkedService(LinkedService): _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -23630,6 +24126,7 @@ def __init__( *, api_token: "_models.SecretBase", additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -23641,6 +24138,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. 
@@ -23657,6 +24156,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -23678,6 +24178,8 @@ class Db2LinkedService(LinkedService): # pylint: disable=too-many-instance-attr :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -23724,6 +24226,7 @@ class Db2LinkedService(LinkedService): # pylint: disable=too-many-instance-attr _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -23743,6 +24246,7 @@ def __init__( self, *, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -23762,6 +24266,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -23802,6 +24308,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -25151,6 +25658,8 @@ class DrillLinkedService(LinkedService): :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -25176,6 +25685,7 @@ class DrillLinkedService(LinkedService): _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -25189,6 +25699,7 @@ def __init__( self, *, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -25202,6 +25713,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. 
+ :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -25221,6 +25734,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -25538,6 +26052,8 @@ class DynamicsAXLinkedService(LinkedService): # pylint: disable=too-many-instan :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -25580,6 +26096,7 @@ class DynamicsAXLinkedService(LinkedService): # pylint: disable=too-many-instan _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -25601,6 +26118,7 @@ def __init__( tenant: JSON, aad_resource_id: JSON, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -25612,6 +26130,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -25643,6 +26163,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -25992,6 +26513,8 @@ class DynamicsCrmLinkedService(LinkedService): # pylint: disable=too-many-insta :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -26020,9 +26543,12 @@ class DynamicsCrmLinkedService(LinkedService): # pylint: disable=too-many-insta :vartype organization_name: JSON :ivar authentication_type: The authentication type to connect to Dynamics CRM server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal' - for Server-To-Server authentication in online scenario. Type: string (or Expression with - resultType string). Required. + for Server-To-Server authentication in online scenario, 'Active Directory' for Dynamics + on-premises with IFD. Type: string (or Expression with resultType string). Required. 
:vartype authentication_type: JSON + :ivar domain: The Active Directory domain that will verify user credentials. Type: string (or + Expression with resultType string). + :vartype domain: JSON :ivar username: User name to access the Dynamics CRM instance. Type: string (or Expression with resultType string). :vartype username: JSON @@ -26057,6 +26583,7 @@ class DynamicsCrmLinkedService(LinkedService): # pylint: disable=too-many-insta _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -26067,6 +26594,7 @@ class DynamicsCrmLinkedService(LinkedService): # pylint: disable=too-many-insta "service_uri": {"key": "typeProperties.serviceUri", "type": "object"}, "organization_name": {"key": "typeProperties.organizationName", "type": "object"}, "authentication_type": {"key": "typeProperties.authenticationType", "type": "object"}, + "domain": {"key": "typeProperties.domain", "type": "object"}, "username": {"key": "typeProperties.username", "type": "object"}, "password": {"key": "typeProperties.password", "type": "SecretBase"}, "service_principal_id": {"key": "typeProperties.servicePrincipalId", "type": "object"}, @@ -26082,6 +26610,7 @@ def __init__( deployment_type: JSON, authentication_type: JSON, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -26090,6 +26619,7 @@ def __init__( port: Optional[JSON] = None, service_uri: Optional[JSON] = None, organization_name: Optional[JSON] = None, + domain: Optional[JSON] = None, username: Optional[JSON] = None, password: Optional["_models.SecretBase"] = None, service_principal_id: Optional[JSON] = None, @@ -26103,6 +26633,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -26132,9 +26664,12 @@ def __init__( :paramtype organization_name: JSON :keyword authentication_type: The authentication type to connect to Dynamics CRM server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal' - for Server-To-Server authentication in online scenario. Type: string (or Expression with - resultType string). Required. + for Server-To-Server authentication in online scenario, 'Active Directory' for Dynamics + on-premises with IFD. Type: string (or Expression with resultType string). Required. :paramtype authentication_type: JSON + :keyword domain: The Active Directory domain that will verify user credentials. Type: string + (or Expression with resultType string). + :paramtype domain: JSON :keyword username: User name to access the Dynamics CRM instance. Type: string (or Expression with resultType string). 
:paramtype username: JSON @@ -26161,6 +26696,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -26174,6 +26710,7 @@ def __init__( self.service_uri = service_uri self.organization_name = organization_name self.authentication_type = authentication_type + self.domain = domain self.username = username self.password = password self.service_principal_id = service_principal_id @@ -26511,6 +27048,8 @@ class DynamicsLinkedService(LinkedService): # pylint: disable=too-many-instance :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -26539,9 +27078,12 @@ class DynamicsLinkedService(LinkedService): # pylint: disable=too-many-instance :vartype organization_name: JSON :ivar authentication_type: The authentication type to connect to Dynamics server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal' for - Server-To-Server authentication in online scenario. Type: string (or Expression with resultType - string). Required. + Server-To-Server authentication in online scenario, 'Active Directory' for Dynamics on-premises + with IFD. Type: string (or Expression with resultType string). Required. :vartype authentication_type: JSON + :ivar domain: The Active Directory domain that will verify user credentials. Type: string (or + Expression with resultType string). + :vartype domain: JSON :ivar username: User name to access the Dynamics instance. Type: string (or Expression with resultType string). 
:vartype username: JSON @@ -26576,6 +27118,7 @@ class DynamicsLinkedService(LinkedService): # pylint: disable=too-many-instance _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -26586,6 +27129,7 @@ class DynamicsLinkedService(LinkedService): # pylint: disable=too-many-instance "service_uri": {"key": "typeProperties.serviceUri", "type": "object"}, "organization_name": {"key": "typeProperties.organizationName", "type": "object"}, "authentication_type": {"key": "typeProperties.authenticationType", "type": "object"}, + "domain": {"key": "typeProperties.domain", "type": "object"}, "username": {"key": "typeProperties.username", "type": "object"}, "password": {"key": "typeProperties.password", "type": "SecretBase"}, "service_principal_id": {"key": "typeProperties.servicePrincipalId", "type": "object"}, @@ -26601,6 +27145,7 @@ def __init__( deployment_type: JSON, authentication_type: JSON, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -26609,6 +27154,7 @@ def __init__( port: Optional[JSON] = None, service_uri: Optional[JSON] = None, organization_name: Optional[JSON] = None, + domain: Optional[JSON] = None, username: Optional[JSON] = None, password: Optional["_models.SecretBase"] = None, service_principal_id: Optional[JSON] = None, @@ -26622,6 +27168,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -26650,9 +27198,12 @@ def __init__( :paramtype organization_name: JSON :keyword authentication_type: The authentication type to connect to Dynamics server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal' - for Server-To-Server authentication in online scenario. Type: string (or Expression with - resultType string). Required. + for Server-To-Server authentication in online scenario, 'Active Directory' for Dynamics + on-premises with IFD. Type: string (or Expression with resultType string). Required. :paramtype authentication_type: JSON + :keyword domain: The Active Directory domain that will verify user credentials. Type: string + (or Expression with resultType string). + :paramtype domain: JSON :keyword username: User name to access the Dynamics instance. Type: string (or Expression with resultType string). 
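# ---------------------------------------------------------------------------
# [Editor's illustrative sketch -- not part of the diff above]
# DynamicsLinkedService (and, identically, DynamicsCrmLinkedService above)
# gains the same 'Active Directory' authentication type and ``domain``
# property. A brief sketch mirroring the CommonDataServiceForApps example,
# using only parameters visible in the hunks; server and account values are
# hypothetical.
from azext_datafactory.vendored_sdks.datafactory import models as _models

dynamics_ls = _models.DynamicsLinkedService(
    deployment_type="OnPremisesWithIfd",
    authentication_type="Active Directory",  # newly documented auth type
    domain="CONTOSO",                        # new: AD domain that verifies user credentials
    username="dynamics-service-user",
    password=_models.SecureString(value="<secret placeholder>"),
)
# ---------------------------------------------------------------------------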
:paramtype username: JSON @@ -26679,6 +27230,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -26692,6 +27244,7 @@ def __init__( self.service_uri = service_uri self.organization_name = organization_name self.authentication_type = authentication_type + self.domain = domain self.username = username self.password = password self.service_principal_id = service_principal_id @@ -26925,6 +27478,8 @@ class EloquaLinkedService(LinkedService): # pylint: disable=too-many-instance-a :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -26964,6 +27519,7 @@ class EloquaLinkedService(LinkedService): # pylint: disable=too-many-instance-a _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -26983,6 +27539,7 @@ def __init__( endpoint: JSON, username: JSON, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -26998,6 +27555,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -27029,6 +27588,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -27661,6 +28221,8 @@ class ExecuteDataFlowActivity(ExecutionActivity): # pylint: disable=too-many-in :vartype staging: ~azure.mgmt.datafactory.models.DataFlowStagingInfo :ivar integration_runtime: The integration runtime reference. :vartype integration_runtime: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar continuation_settings: Continuation settings for execute data flow activity. + :vartype continuation_settings: ~azure.mgmt.datafactory.models.ContinuationSettingsReference :ivar compute: Compute properties for data flow activity. :vartype compute: ~azure.mgmt.datafactory.models.ExecuteDataFlowActivityTypePropertiesCompute :ivar trace_level: Trace level setting used for data flow monitoring output. 
Supported values @@ -27698,6 +28260,10 @@ class ExecuteDataFlowActivity(ExecutionActivity): # pylint: disable=too-many-in "data_flow": {"key": "typeProperties.dataFlow", "type": "DataFlowReference"}, "staging": {"key": "typeProperties.staging", "type": "DataFlowStagingInfo"}, "integration_runtime": {"key": "typeProperties.integrationRuntime", "type": "IntegrationRuntimeReference"}, + "continuation_settings": { + "key": "typeProperties.continuationSettings", + "type": "ContinuationSettingsReference", + }, "compute": {"key": "typeProperties.compute", "type": "ExecuteDataFlowActivityTypePropertiesCompute"}, "trace_level": {"key": "typeProperties.traceLevel", "type": "object"}, "continue_on_error": {"key": "typeProperties.continueOnError", "type": "object"}, @@ -27720,6 +28286,7 @@ def __init__( policy: Optional["_models.ActivityPolicy"] = None, staging: Optional["_models.DataFlowStagingInfo"] = None, integration_runtime: Optional["_models.IntegrationRuntimeReference"] = None, + continuation_settings: Optional["_models.ContinuationSettingsReference"] = None, compute: Optional["_models.ExecuteDataFlowActivityTypePropertiesCompute"] = None, trace_level: Optional[JSON] = None, continue_on_error: Optional[JSON] = None, @@ -27756,6 +28323,8 @@ def __init__( :paramtype staging: ~azure.mgmt.datafactory.models.DataFlowStagingInfo :keyword integration_runtime: The integration runtime reference. :paramtype integration_runtime: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword continuation_settings: Continuation settings for execute data flow activity. + :paramtype continuation_settings: ~azure.mgmt.datafactory.models.ContinuationSettingsReference :keyword compute: Compute properties for data flow activity. :paramtype compute: ~azure.mgmt.datafactory.models.ExecuteDataFlowActivityTypePropertiesCompute :keyword trace_level: Trace level setting used for data flow monitoring output. Supported @@ -27788,6 +28357,7 @@ def __init__( self.data_flow = data_flow self.staging = staging self.integration_runtime = integration_runtime + self.continuation_settings = continuation_settings self.compute = compute self.trace_level = trace_level self.continue_on_error = continue_on_error @@ -27806,6 +28376,8 @@ class ExecuteDataFlowActivityTypeProperties(_serialization.Model): :vartype staging: ~azure.mgmt.datafactory.models.DataFlowStagingInfo :ivar integration_runtime: The integration runtime reference. :vartype integration_runtime: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar continuation_settings: Continuation settings for execute data flow activity. + :vartype continuation_settings: ~azure.mgmt.datafactory.models.ContinuationSettingsReference :ivar compute: Compute properties for data flow activity. :vartype compute: ~azure.mgmt.datafactory.models.ExecuteDataFlowActivityTypePropertiesCompute :ivar trace_level: Trace level setting used for data flow monitoring output. 
Supported values @@ -27831,6 +28403,7 @@ class ExecuteDataFlowActivityTypeProperties(_serialization.Model): "data_flow": {"key": "dataFlow", "type": "DataFlowReference"}, "staging": {"key": "staging", "type": "DataFlowStagingInfo"}, "integration_runtime": {"key": "integrationRuntime", "type": "IntegrationRuntimeReference"}, + "continuation_settings": {"key": "continuationSettings", "type": "ContinuationSettingsReference"}, "compute": {"key": "compute", "type": "ExecuteDataFlowActivityTypePropertiesCompute"}, "trace_level": {"key": "traceLevel", "type": "object"}, "continue_on_error": {"key": "continueOnError", "type": "object"}, @@ -27844,6 +28417,7 @@ def __init__( data_flow: "_models.DataFlowReference", staging: Optional["_models.DataFlowStagingInfo"] = None, integration_runtime: Optional["_models.IntegrationRuntimeReference"] = None, + continuation_settings: Optional["_models.ContinuationSettingsReference"] = None, compute: Optional["_models.ExecuteDataFlowActivityTypePropertiesCompute"] = None, trace_level: Optional[JSON] = None, continue_on_error: Optional[JSON] = None, @@ -27858,6 +28432,8 @@ def __init__( :paramtype staging: ~azure.mgmt.datafactory.models.DataFlowStagingInfo :keyword integration_runtime: The integration runtime reference. :paramtype integration_runtime: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword continuation_settings: Continuation settings for execute data flow activity. + :paramtype continuation_settings: ~azure.mgmt.datafactory.models.ContinuationSettingsReference :keyword compute: Compute properties for data flow activity. :paramtype compute: ~azure.mgmt.datafactory.models.ExecuteDataFlowActivityTypePropertiesCompute :keyword trace_level: Trace level setting used for data flow monitoring output. Supported @@ -27878,6 +28454,7 @@ def __init__( self.data_flow = data_flow self.staging = staging self.integration_runtime = integration_runtime + self.continuation_settings = continuation_settings self.compute = compute self.trace_level = trace_level self.continue_on_error = continue_on_error @@ -28075,7 +28652,9 @@ def __init__( self.secure_input = secure_input -class ExecutePowerQueryActivityTypeProperties(ExecuteDataFlowActivityTypeProperties): +class ExecutePowerQueryActivityTypeProperties( + ExecuteDataFlowActivityTypeProperties +): # pylint: disable=too-many-instance-attributes """Execute power query data flow activity properties. All required parameters must be populated in order to send to server. @@ -28086,6 +28665,8 @@ class ExecutePowerQueryActivityTypeProperties(ExecuteDataFlowActivityTypePropert :vartype staging: ~azure.mgmt.datafactory.models.DataFlowStagingInfo :ivar integration_runtime: The integration runtime reference. :vartype integration_runtime: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar continuation_settings: Continuation settings for execute data flow activity. + :vartype continuation_settings: ~azure.mgmt.datafactory.models.ContinuationSettingsReference :ivar compute: Compute properties for data flow activity. :vartype compute: ~azure.mgmt.datafactory.models.ExecuteDataFlowActivityTypePropertiesCompute :ivar trace_level: Trace level setting used for data flow monitoring output. 
Supported values @@ -28116,6 +28697,7 @@ class ExecutePowerQueryActivityTypeProperties(ExecuteDataFlowActivityTypePropert "data_flow": {"key": "dataFlow", "type": "DataFlowReference"}, "staging": {"key": "staging", "type": "DataFlowStagingInfo"}, "integration_runtime": {"key": "integrationRuntime", "type": "IntegrationRuntimeReference"}, + "continuation_settings": {"key": "continuationSettings", "type": "ContinuationSettingsReference"}, "compute": {"key": "compute", "type": "ExecuteDataFlowActivityTypePropertiesCompute"}, "trace_level": {"key": "traceLevel", "type": "object"}, "continue_on_error": {"key": "continueOnError", "type": "object"}, @@ -28131,6 +28713,7 @@ def __init__( data_flow: "_models.DataFlowReference", staging: Optional["_models.DataFlowStagingInfo"] = None, integration_runtime: Optional["_models.IntegrationRuntimeReference"] = None, + continuation_settings: Optional["_models.ContinuationSettingsReference"] = None, compute: Optional["_models.ExecuteDataFlowActivityTypePropertiesCompute"] = None, trace_level: Optional[JSON] = None, continue_on_error: Optional[JSON] = None, @@ -28147,6 +28730,8 @@ def __init__( :paramtype staging: ~azure.mgmt.datafactory.models.DataFlowStagingInfo :keyword integration_runtime: The integration runtime reference. :paramtype integration_runtime: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword continuation_settings: Continuation settings for execute data flow activity. + :paramtype continuation_settings: ~azure.mgmt.datafactory.models.ContinuationSettingsReference :keyword compute: Compute properties for data flow activity. :paramtype compute: ~azure.mgmt.datafactory.models.ExecuteDataFlowActivityTypePropertiesCompute :keyword trace_level: Trace level setting used for data flow monitoring output. Supported @@ -28172,6 +28757,7 @@ def __init__( data_flow=data_flow, staging=staging, integration_runtime=integration_runtime, + continuation_settings=continuation_settings, compute=compute, trace_level=trace_level, continue_on_error=continue_on_error, @@ -28424,6 +29010,8 @@ class ExecuteWranglingDataflowActivity(Activity): # pylint: disable=too-many-in :vartype staging: ~azure.mgmt.datafactory.models.DataFlowStagingInfo :ivar integration_runtime: The integration runtime reference. :vartype integration_runtime: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar continuation_settings: Continuation settings for execute data flow activity. + :vartype continuation_settings: ~azure.mgmt.datafactory.models.ContinuationSettingsReference :ivar compute: Compute properties for data flow activity. :vartype compute: ~azure.mgmt.datafactory.models.ExecuteDataFlowActivityTypePropertiesCompute :ivar trace_level: Trace level setting used for data flow monitoring output. 
Supported values @@ -28465,6 +29053,10 @@ class ExecuteWranglingDataflowActivity(Activity): # pylint: disable=too-many-in "data_flow": {"key": "typeProperties.dataFlow", "type": "DataFlowReference"}, "staging": {"key": "typeProperties.staging", "type": "DataFlowStagingInfo"}, "integration_runtime": {"key": "typeProperties.integrationRuntime", "type": "IntegrationRuntimeReference"}, + "continuation_settings": { + "key": "typeProperties.continuationSettings", + "type": "ContinuationSettingsReference", + }, "compute": {"key": "typeProperties.compute", "type": "ExecuteDataFlowActivityTypePropertiesCompute"}, "trace_level": {"key": "typeProperties.traceLevel", "type": "object"}, "continue_on_error": {"key": "typeProperties.continueOnError", "type": "object"}, @@ -28488,6 +29080,7 @@ def __init__( policy: Optional["_models.ActivityPolicy"] = None, staging: Optional["_models.DataFlowStagingInfo"] = None, integration_runtime: Optional["_models.IntegrationRuntimeReference"] = None, + continuation_settings: Optional["_models.ContinuationSettingsReference"] = None, compute: Optional["_models.ExecuteDataFlowActivityTypePropertiesCompute"] = None, trace_level: Optional[JSON] = None, continue_on_error: Optional[JSON] = None, @@ -28524,6 +29117,8 @@ def __init__( :paramtype staging: ~azure.mgmt.datafactory.models.DataFlowStagingInfo :keyword integration_runtime: The integration runtime reference. :paramtype integration_runtime: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword continuation_settings: Continuation settings for execute data flow activity. + :paramtype continuation_settings: ~azure.mgmt.datafactory.models.ContinuationSettingsReference :keyword compute: Compute properties for data flow activity. :paramtype compute: ~azure.mgmt.datafactory.models.ExecuteDataFlowActivityTypePropertiesCompute :keyword trace_level: Trace level setting used for data flow monitoring output. Supported @@ -28560,6 +29155,7 @@ def __init__( self.data_flow = data_flow self.staging = staging self.integration_runtime = integration_runtime + self.continuation_settings = continuation_settings self.compute = compute self.trace_level = trace_level self.continue_on_error = continue_on_error @@ -29483,7 +30079,7 @@ def __init__( self.error_code = error_code -class FileServerLinkedService(LinkedService): +class FileServerLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """File system linked service. All required parameters must be populated in order to send to server. @@ -29493,6 +30089,8 @@ class FileServerLinkedService(LinkedService): :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. 
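# ---------------------------------------------------------------------------
# [Editor's illustrative sketch -- not part of the diff above]
# ExecuteDataFlowActivity, ExecuteDataFlowActivityTypeProperties,
# ExecutePowerQueryActivityTypeProperties and ExecuteWranglingDataflowActivity
# all gain an optional continuationSettings property in the hunks above. A
# sketch wiring the new ContinuationSettingsReference into an
# ExecuteDataFlowActivity; the activity and data flow names are hypothetical,
# and name/data_flow are required by the existing (unchanged) constructor.
from azext_datafactory.vendored_sdks.datafactory import models as _models

run_flow = _models.ExecuteDataFlowActivity(
    name="RunOrdersDataFlow",  # hypothetical activity name
    data_flow=_models.DataFlowReference(
        type="DataFlowReference",
        reference_name="OrdersMappingDataFlow",  # hypothetical data flow name
    ),
    continuation_settings=_models.ContinuationSettingsReference(  # new property
        continuation_ttl_in_minutes=30,
        customized_checkpoint_key="orders-incremental",  # hypothetical key
    ),
    continue_on_error=False,
)
# ---------------------------------------------------------------------------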
@@ -29522,6 +30120,7 @@ class FileServerLinkedService(LinkedService): _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -29537,6 +30136,7 @@ def __init__( *, host: JSON, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -29550,6 +30150,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -29572,6 +30174,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -30666,6 +31269,8 @@ class FtpServerLinkedService(LinkedService): # pylint: disable=too-many-instanc :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -30708,6 +31313,7 @@ class FtpServerLinkedService(LinkedService): # pylint: disable=too-many-instanc _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -30730,6 +31336,7 @@ def __init__( *, host: JSON, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -30747,6 +31354,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -30782,6 +31391,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -31219,7 +31829,7 @@ class GlobalParameterSpecification(_serialization.Model): All required parameters must be populated in order to send to server. :ivar type: Global Parameter type. Required. 
Known values are: "Object", "String", "Int", - "Float", "Bool", and "Array". + "Float", "Bool", "Array", and "Int". :vartype type: str or ~azure.mgmt.datafactory.models.GlobalParameterType :ivar value: Value of parameter. Required. :vartype value: JSON @@ -31238,7 +31848,7 @@ class GlobalParameterSpecification(_serialization.Model): def __init__(self, *, type: Union[str, "_models.GlobalParameterType"], value: JSON, **kwargs: Any) -> None: """ :keyword type: Global Parameter type. Required. Known values are: "Object", "String", "Int", - "Float", "Bool", and "Array". + "Float", "Bool", "Array", and "Int". :paramtype type: str or ~azure.mgmt.datafactory.models.GlobalParameterType :keyword value: Value of parameter. Required. :paramtype value: JSON @@ -31258,6 +31868,8 @@ class GoogleAdWordsLinkedService(LinkedService): # pylint: disable=too-many-ins :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -31333,6 +31945,7 @@ class GoogleAdWordsLinkedService(LinkedService): # pylint: disable=too-many-ins _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -31359,6 +31972,7 @@ def __init__( self, *, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -31385,6 +31999,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -31455,6 +32071,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -31694,6 +32311,8 @@ class GoogleBigQueryLinkedService(LinkedService): # pylint: disable=too-many-in :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. 
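Since the GlobalParameterSpecification hunk above only touches the documented value list, a short usage sketch may help; it assumes the public azure.mgmt.datafactory models and uses illustrative values.

from azure.mgmt.datafactory import models

# Global parameters are typed; `type` accepts the GlobalParameterType enum
# or its string value.
max_retries = models.GlobalParameterSpecification(type="Int", value=3)
copy_behavior = models.GlobalParameterSpecification(
    type=models.GlobalParameterType.STRING, value="PreserveHierarchy"
)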
@@ -31756,6 +32375,7 @@ class GoogleBigQueryLinkedService(LinkedService): # pylint: disable=too-many-in _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -31780,6 +32400,7 @@ def __init__( project: JSON, authentication_type: Union[str, "_models.GoogleBigQueryAuthenticationType"], additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -31800,6 +32421,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -31854,6 +32477,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -32109,6 +32733,8 @@ class GoogleBigQueryV2LinkedService(LinkedService): # pylint: disable=too-many- :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -32150,6 +32776,7 @@ class GoogleBigQueryV2LinkedService(LinkedService): # pylint: disable=too-many- _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -32169,6 +32796,7 @@ def __init__( project_id: JSON, authentication_type: Union[str, "_models.GoogleBigQueryV2AuthenticationType"], additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -32184,6 +32812,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. 
@@ -32217,6 +32847,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -32448,7 +33079,7 @@ def __init__( self.query = query -class GoogleCloudStorageLinkedService(LinkedService): +class GoogleCloudStorageLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """Linked service for Google Cloud Storage. All required parameters must be populated in order to send to server. @@ -32458,6 +33089,8 @@ class GoogleCloudStorageLinkedService(LinkedService): :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -32489,6 +33122,7 @@ class GoogleCloudStorageLinkedService(LinkedService): _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -32503,6 +33137,7 @@ def __init__( self, *, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -32517,6 +33152,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -32542,6 +33179,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -32786,6 +33424,8 @@ class GoogleSheetsLinkedService(LinkedService): :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. 
@@ -32809,6 +33449,7 @@ class GoogleSheetsLinkedService(LinkedService): _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -32822,6 +33463,7 @@ def __init__( *, api_token: "_models.SecretBase", additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -32833,6 +33475,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -32849,6 +33493,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -32870,6 +33515,8 @@ class GreenplumLinkedService(LinkedService): :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -32895,6 +33542,7 @@ class GreenplumLinkedService(LinkedService): _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -32908,6 +33556,7 @@ def __init__( self, *, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -32921,6 +33570,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -32940,6 +33591,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -33185,6 +33837,8 @@ class HBaseLinkedService(LinkedService): # pylint: disable=too-many-instance-at :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. 
+ :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -33235,6 +33889,7 @@ class HBaseLinkedService(LinkedService): # pylint: disable=too-many-instance-at _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -33258,6 +33913,7 @@ def __init__( host: JSON, authentication_type: Union[str, "_models.HBaseAuthenticationType"], additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -33277,6 +33933,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -33320,6 +33978,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -33554,6 +34213,8 @@ class HdfsLinkedService(LinkedService): # pylint: disable=too-many-instance-att :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -33586,6 +34247,7 @@ class HdfsLinkedService(LinkedService): # pylint: disable=too-many-instance-att _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -33602,6 +34264,7 @@ def __init__( *, url: JSON, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -33616,6 +34279,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. 
@@ -33641,6 +34306,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -34118,6 +34784,8 @@ class HDInsightLinkedService(LinkedService): # pylint: disable=too-many-instanc :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -34158,6 +34826,7 @@ class HDInsightLinkedService(LinkedService): # pylint: disable=too-many-instanc _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -34180,6 +34849,7 @@ def __init__( *, cluster_uri: JSON, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -34197,6 +34867,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -34230,6 +34902,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -34419,6 +35092,8 @@ class HDInsightOnDemandLinkedService(LinkedService): # pylint: disable=too-many :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -34435,9 +35110,9 @@ class HDInsightOnDemandLinkedService(LinkedService): # pylint: disable=too-many are no other active jobs in the cluster. The minimum value is 5 mins. Type: string (or Expression with resultType string). Required. :vartype time_to_live: JSON - :ivar version: Version of the HDInsight cluster.  Type: string (or Expression with resultType - string). Required. - :vartype version: JSON + :ivar version_type_properties_version: Version of the HDInsight cluster.  Type: string (or + Expression with resultType string). Required. + :vartype version_type_properties_version: JSON :ivar linked_service_name: Azure Storage linked service to be used by the on-demand cluster for storing and processing data. Required. 
:vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference @@ -34532,7 +35207,7 @@ class HDInsightOnDemandLinkedService(LinkedService): # pylint: disable=too-many "type": {"required": True}, "cluster_size": {"required": True}, "time_to_live": {"required": True}, - "version": {"required": True}, + "version_type_properties_version": {"required": True}, "linked_service_name": {"required": True}, "host_subscription_id": {"required": True}, "tenant": {"required": True}, @@ -34542,13 +35217,14 @@ class HDInsightOnDemandLinkedService(LinkedService): # pylint: disable=too-many _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, "annotations": {"key": "annotations", "type": "[object]"}, "cluster_size": {"key": "typeProperties.clusterSize", "type": "object"}, "time_to_live": {"key": "typeProperties.timeToLive", "type": "object"}, - "version": {"key": "typeProperties.version", "type": "object"}, + "version_type_properties_version": {"key": "typeProperties.version", "type": "object"}, "linked_service_name": {"key": "typeProperties.linkedServiceName", "type": "LinkedServiceReference"}, "host_subscription_id": {"key": "typeProperties.hostSubscriptionId", "type": "object"}, "service_principal_id": {"key": "typeProperties.servicePrincipalId", "type": "object"}, @@ -34593,12 +35269,13 @@ def __init__( # pylint: disable=too-many-locals *, cluster_size: JSON, time_to_live: JSON, - version: JSON, + version_type_properties_version: JSON, linked_service_name: "_models.LinkedServiceReference", host_subscription_id: JSON, tenant: JSON, cluster_resource_group: JSON, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -34636,6 +35313,8 @@ def __init__( # pylint: disable=too-many-locals :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -34652,9 +35331,9 @@ def __init__( # pylint: disable=too-many-locals are no other active jobs in the cluster. The minimum value is 5 mins. Type: string (or Expression with resultType string). Required. :paramtype time_to_live: JSON - :keyword version: Version of the HDInsight cluster.  Type: string (or Expression with - resultType string). Required. - :paramtype version: JSON + :keyword version_type_properties_version: Version of the HDInsight cluster.  Type: string (or + Expression with resultType string). Required. + :paramtype version_type_properties_version: JSON :keyword linked_service_name: Azure Storage linked service to be used by the on-demand cluster for storing and processing data. Required. 
:paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference @@ -34747,6 +35426,7 @@ def __init__( # pylint: disable=too-many-locals """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -34756,7 +35436,7 @@ def __init__( # pylint: disable=too-many-locals self.type: str = "HDInsightOnDemand" self.cluster_size = cluster_size self.time_to_live = time_to_live - self.version = version + self.version_type_properties_version = version_type_properties_version self.linked_service_name = linked_service_name self.host_subscription_id = host_subscription_id self.service_principal_id = service_principal_id @@ -35314,6 +35994,8 @@ class HiveLinkedService(LinkedService): # pylint: disable=too-many-instance-att :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -35383,6 +36065,7 @@ class HiveLinkedService(LinkedService): # pylint: disable=too-many-instance-att _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -35406,12 +36089,13 @@ class HiveLinkedService(LinkedService): # pylint: disable=too-many-instance-att "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } - def __init__( + def __init__( # pylint: disable=too-many-locals self, *, host: JSON, authentication_type: Union[str, "_models.HiveAuthenticationType"], additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -35437,6 +36121,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -35498,6 +36184,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -35908,6 +36595,8 @@ class HttpLinkedService(LinkedService): # pylint: disable=too-many-instance-att :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. 
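Because HDInsightOnDemandLinkedService now exposes both version keywords, callers that previously passed the HDInsight cluster version as `version` must switch to `version_type_properties_version`; `version` now sets the top-level linked-service version. A sketch with illustrative values, assuming the public azure.mgmt.datafactory models:

from azure.mgmt.datafactory import models

on_demand_ls = models.HDInsightOnDemandLinkedService(
    cluster_size=4,
    time_to_live="00:05:00",
    version_type_properties_version="4.0",  # HDInsight cluster version (typeProperties.version)
    version="1.0",                           # new: linked-service version (top-level "version")
    linked_service_name=models.LinkedServiceReference(
        type="LinkedServiceReference", reference_name="AzureStorageLS"
    ),
    host_subscription_id="<subscription-id>",
    tenant="<tenant-id>",
    cluster_resource_group="<resource-group>",
)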
@@ -35957,6 +36646,7 @@ class HttpLinkedService(LinkedService): # pylint: disable=too-many-instance-att _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -35980,6 +36670,7 @@ def __init__( *, url: JSON, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -35998,6 +36689,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -36040,6 +36733,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -36321,6 +37015,8 @@ class HubspotLinkedService(LinkedService): # pylint: disable=too-many-instance- :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -36362,6 +37058,7 @@ class HubspotLinkedService(LinkedService): # pylint: disable=too-many-instance- _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -36381,6 +37078,7 @@ def __init__( *, client_id: JSON, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -36398,6 +37096,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. 
@@ -36432,6 +37132,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -36783,6 +37484,8 @@ class ImpalaLinkedService(LinkedService): # pylint: disable=too-many-instance-a :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -36834,6 +37537,7 @@ class ImpalaLinkedService(LinkedService): # pylint: disable=too-many-instance-a _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -36857,6 +37561,7 @@ def __init__( host: JSON, authentication_type: Union[str, "_models.ImpalaAuthenticationType"], additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -36876,6 +37581,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -36920,6 +37627,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -37173,6 +37881,8 @@ class InformixLinkedService(LinkedService): # pylint: disable=too-many-instance :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. 
@@ -37210,6 +37920,7 @@ class InformixLinkedService(LinkedService): # pylint: disable=too-many-instance _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -37227,6 +37938,7 @@ def __init__( *, connection_string: JSON, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -37242,6 +37954,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -37272,6 +37986,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -38826,6 +39541,8 @@ class JiraLinkedService(LinkedService): # pylint: disable=too-many-instance-att :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -38868,6 +39585,7 @@ class JiraLinkedService(LinkedService): # pylint: disable=too-many-instance-att _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -38888,6 +39606,7 @@ def __init__( host: JSON, username: JSON, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -38904,6 +39623,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. 
@@ -38939,6 +39660,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -39694,6 +40416,8 @@ class LakeHouseLinkedService(LinkedService): # pylint: disable=too-many-instanc :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -39739,6 +40463,7 @@ class LakeHouseLinkedService(LinkedService): # pylint: disable=too-many-instanc _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -39757,6 +40482,7 @@ def __init__( self, *, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -39775,6 +40501,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -39814,6 +40542,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -41054,6 +41783,8 @@ class MagentoLinkedService(LinkedService): # pylint: disable=too-many-instance- :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -41089,6 +41820,7 @@ class MagentoLinkedService(LinkedService): # pylint: disable=too-many-instance- _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -41106,6 +41838,7 @@ def __init__( *, host: JSON, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -41121,6 +41854,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. 
:paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -41149,6 +41884,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -42599,6 +43335,8 @@ class MariaDBLinkedService(LinkedService): # pylint: disable=too-many-instance- :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -42636,6 +43374,7 @@ class MariaDBLinkedService(LinkedService): # pylint: disable=too-many-instance- _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -42654,6 +43393,7 @@ def __init__( self, *, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -42672,6 +43412,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -42703,6 +43445,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -42934,6 +43677,8 @@ class MarketoLinkedService(LinkedService): # pylint: disable=too-many-instance- :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. 
@@ -42972,6 +43717,7 @@ class MarketoLinkedService(LinkedService): # pylint: disable=too-many-instance- _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -42991,6 +43737,7 @@ def __init__( endpoint: JSON, client_id: JSON, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -43006,6 +43753,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -43037,6 +43786,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -43293,6 +44043,8 @@ class MicrosoftAccessLinkedService(LinkedService): # pylint: disable=too-many-i :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -43330,6 +44082,7 @@ class MicrosoftAccessLinkedService(LinkedService): # pylint: disable=too-many-i _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -43347,6 +44100,7 @@ def __init__( *, connection_string: JSON, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -43362,6 +44116,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. 
@@ -43392,6 +44148,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -43819,6 +44576,8 @@ class MongoDbAtlasLinkedService(LinkedService): :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -43848,6 +44607,7 @@ class MongoDbAtlasLinkedService(LinkedService): _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -43863,6 +44623,7 @@ def __init__( connection_string: JSON, database: JSON, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -43874,6 +44635,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -43895,6 +44658,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -44319,6 +45083,8 @@ class MongoDbLinkedService(LinkedService): # pylint: disable=too-many-instance- :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -44367,6 +45133,7 @@ class MongoDbLinkedService(LinkedService): # pylint: disable=too-many-instance- _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -44389,6 +45156,7 @@ def __init__( server: JSON, database_name: JSON, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -44407,6 +45175,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. 
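Per the attribute maps above, `version` is serialized at the top level of the linked-service payload rather than under typeProperties. A sketch of the expected shape for the MongoDbAtlasLinkedService hunk, assuming the public azure.mgmt.datafactory models and illustrative values:

from azure.mgmt.datafactory import models

mongo_ls = models.MongoDbAtlasLinkedService(
    connection_string="mongodb+srv://user:password@cluster0.example.net",
    database="salesdb",
    version="1.0",
)
payload = mongo_ls.serialize()
# Expected shape (abridged):
# {"type": "MongoDbAtlas",
#  "version": "1.0",
#  "typeProperties": {"connectionString": "mongodb+srv://...", "database": "salesdb"}}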
:paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -44449,6 +45219,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -44676,6 +45447,8 @@ class MongoDbV2LinkedService(LinkedService): :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -44702,6 +45475,7 @@ class MongoDbV2LinkedService(LinkedService): _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -44716,6 +45490,7 @@ def __init__( connection_string: JSON, database: JSON, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -44726,6 +45501,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -44744,6 +45521,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -44993,6 +45771,8 @@ class MySqlLinkedService(LinkedService): # pylint: disable=too-many-instance-at :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. 
@@ -45036,6 +45816,7 @@ class MySqlLinkedService(LinkedService): # pylint: disable=too-many-instance-at _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -45056,6 +45837,7 @@ def __init__( self, *, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -45076,6 +45858,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -45113,6 +45897,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -45344,6 +46129,8 @@ class NetezzaLinkedService(LinkedService): :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -45369,6 +46156,7 @@ class NetezzaLinkedService(LinkedService): _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -45382,6 +46170,7 @@ def __init__( self, *, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -45395,6 +46184,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -45414,6 +46205,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -45721,7 +46513,8 @@ class NotebookParameter(_serialization.Model): :ivar value: Notebook parameter value. Type: string (or Expression with resultType string). :vartype value: JSON - :ivar type: Notebook parameter type. 
Known values are: "string", "int", "float", and "bool". + :ivar type: Notebook parameter type. Known values are: "string", "int", "float", "bool", and + "int". :vartype type: str or ~azure.mgmt.datafactory.models.NotebookParameterType """ @@ -45740,7 +46533,8 @@ def __init__( """ :keyword value: Notebook parameter value. Type: string (or Expression with resultType string). :paramtype value: JSON - :keyword type: Notebook parameter type. Known values are: "string", "int", "float", and "bool". + :keyword type: Notebook parameter type. Known values are: "string", "int", "float", "bool", and + "int". :paramtype type: str or ~azure.mgmt.datafactory.models.NotebookParameterType """ super().__init__(**kwargs) @@ -45758,6 +46552,8 @@ class ODataLinkedService(LinkedService): # pylint: disable=too-many-instance-at :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -45822,6 +46618,7 @@ class ODataLinkedService(LinkedService): # pylint: disable=too-many-instance-at _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -45853,6 +46650,7 @@ def __init__( *, url: JSON, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -45878,6 +46676,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -45935,6 +46735,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -46176,6 +46977,8 @@ class OdbcLinkedService(LinkedService): # pylint: disable=too-many-instance-att :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. 
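For the NotebookParameter hunk above, a small sketch of typed notebook parameters (illustrative values, assuming the public azure.mgmt.datafactory models):

from azure.mgmt.datafactory import models

# `type` accepts the NotebookParameterType enum or its string value.
run_date = models.NotebookParameter(value="2024-06-01", type="string")
batch_size = models.NotebookParameter(value=500, type=models.NotebookParameterType.INT)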
@@ -46212,6 +47015,7 @@ class OdbcLinkedService(LinkedService): # pylint: disable=too-many-instance-att _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -46229,6 +47033,7 @@ def __init__( *, connection_string: JSON, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -46244,6 +47049,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -46273,6 +47080,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -46716,6 +47524,8 @@ class Office365LinkedService(LinkedService): # pylint: disable=too-many-instanc :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -46751,6 +47561,7 @@ class Office365LinkedService(LinkedService): # pylint: disable=too-many-instanc _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -46770,6 +47581,7 @@ def __init__( service_principal_id: JSON, service_principal_key: "_models.SecretBase", additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -46781,6 +47593,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. 
@@ -46806,6 +47620,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -47299,7 +48114,7 @@ def __init__( self.metric_specifications = metric_specifications -class OracleCloudStorageLinkedService(LinkedService): +class OracleCloudStorageLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """Linked service for Oracle Cloud Storage. All required parameters must be populated in order to send to server. @@ -47309,6 +48124,8 @@ class OracleCloudStorageLinkedService(LinkedService): :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -47340,6 +48157,7 @@ class OracleCloudStorageLinkedService(LinkedService): _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -47354,6 +48172,7 @@ def __init__( self, *, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -47368,6 +48187,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -47393,6 +48214,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -47637,6 +48459,8 @@ class OracleLinkedService(LinkedService): :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. 
@@ -47663,6 +48487,7 @@ class OracleLinkedService(LinkedService): _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -47677,6 +48502,7 @@ def __init__( *, connection_string: JSON, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -47689,6 +48515,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -47708,6 +48536,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -47786,6 +48615,8 @@ class OracleServiceCloudLinkedService(LinkedService): # pylint: disable=too-man :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -47827,6 +48658,7 @@ class OracleServiceCloudLinkedService(LinkedService): # pylint: disable=too-man _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -47847,6 +48679,7 @@ def __init__( username: JSON, password: "_models.SecretBase", additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -47861,6 +48694,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -47893,6 +48728,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -48930,7 +49766,7 @@ class ParameterSpecification(_serialization.Model): All required parameters must be populated in order to send to server. 
:ivar type: Parameter type. Required. Known values are: "Object", "String", "Int", "Float", - "Bool", "Array", and "SecureString". + "Bool", "Array", and "SecureString". :vartype type: str or ~azure.mgmt.datafactory.models.ParameterType :ivar default_value: Default value of parameter. :vartype default_value: JSON @@ -48950,7 +49786,7 @@ def __init__( ) -> None: """ :keyword type: Parameter type. Required. Known values are: "Object", "String", "Int", "Float", - "Bool", "Array", and "SecureString". + "Bool", "Array", and "SecureString". :paramtype type: str or ~azure.mgmt.datafactory.models.ParameterType :keyword default_value: Default value of parameter. :paramtype default_value: JSON @@ -49435,6 +50271,8 @@ class PaypalLinkedService(LinkedService): # pylint: disable=too-many-instance-a :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -49473,6 +50311,7 @@ class PaypalLinkedService(LinkedService): # pylint: disable=too-many-instance-a _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -49492,6 +50331,7 @@ def __init__( host: JSON, client_id: JSON, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -49507,6 +50347,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -49537,6 +50379,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -49767,6 +50610,8 @@ class PhoenixLinkedService(LinkedService): # pylint: disable=too-many-instance- :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description.
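For context on the ``parameters`` keyword that each of these constructors forwards, it maps parameter names to the ``ParameterSpecification`` model touched in the hunk above. A short, hedged sketch in which the names and values are illustrative only:

    from azure.mgmt.datafactory import models as _models

    # "String" is one of the documented ParameterType known values; the
    # default value is illustrative.
    params = {
        "schemaName": _models.ParameterSpecification(type="String", default_value="dbo"),
    }

    # The mapping is passed through the shared ``parameters`` keyword, e.g. on
    # the PaypalLinkedService shown above (host and client_id are placeholders).
    paypal_ls = _models.PaypalLinkedService(
        host="api.sandbox.paypal.com",
        client_id="<application-client-id>",
        parameters=params,
    )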
@@ -49823,6 +50668,7 @@ class PhoenixLinkedService(LinkedService): # pylint: disable=too-many-instance- _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -49847,6 +50693,7 @@ def __init__( host: JSON, authentication_type: Union[str, "_models.PhoenixAuthenticationType"], additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -49867,6 +50714,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -49915,6 +50764,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -50724,6 +51574,8 @@ class PostgreSqlLinkedService(LinkedService): :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -50750,6 +51602,7 @@ class PostgreSqlLinkedService(LinkedService): _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -50764,6 +51617,7 @@ def __init__( *, connection_string: JSON, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -50776,6 +51630,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. 
@@ -50795,6 +51651,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -51037,6 +51894,8 @@ class PostgreSqlV2LinkedService(LinkedService): # pylint: disable=too-many-inst :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -51107,6 +51966,7 @@ class PostgreSqlV2LinkedService(LinkedService): # pylint: disable=too-many-inst _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -51140,6 +52000,7 @@ def __init__( # pylint: disable=too-many-locals database: JSON, ssl_mode: JSON, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -51165,6 +52026,8 @@ def __init__( # pylint: disable=too-many-locals :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -51227,6 +52090,7 @@ def __init__( # pylint: disable=too-many-locals """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -51665,6 +52529,8 @@ class PrestoLinkedService(LinkedService): # pylint: disable=too-many-instance-a :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. 
@@ -51724,6 +52590,7 @@ class PrestoLinkedService(LinkedService): # pylint: disable=too-many-instance-a _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -51752,6 +52619,7 @@ def __init__( catalog: JSON, authentication_type: Union[str, "_models.PrestoAuthenticationType"], additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -51772,6 +52640,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -51822,6 +52692,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -52450,6 +53321,8 @@ class QuickbaseLinkedService(LinkedService): :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -52477,6 +53350,7 @@ class QuickbaseLinkedService(LinkedService): _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -52492,6 +53366,7 @@ def __init__( url: JSON, user_token: "_models.SecretBase", additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -52503,6 +53378,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. 
@@ -52522,6 +53399,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -52544,6 +53422,8 @@ class QuickBooksLinkedService(LinkedService): # pylint: disable=too-many-instan :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -52582,6 +53462,7 @@ class QuickBooksLinkedService(LinkedService): # pylint: disable=too-many-instan _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -52601,6 +53482,7 @@ def __init__( self, *, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -52620,6 +53502,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -52652,6 +53536,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -53426,6 +54311,8 @@ class ResponsysLinkedService(LinkedService): # pylint: disable=too-many-instanc :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -53467,6 +54354,7 @@ class ResponsysLinkedService(LinkedService): # pylint: disable=too-many-instanc _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -53486,6 +54374,7 @@ def __init__( endpoint: JSON, client_id: JSON, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -53501,6 +54390,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. 
:paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -53534,6 +54425,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -53900,6 +54792,8 @@ class RestServiceLinkedService(LinkedService): # pylint: disable=too-many-insta :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -53964,6 +54858,18 @@ class RestServiceLinkedService(LinkedService): # pylint: disable=too-many-insta :ivar scope: The scope of the access required. It describes what kind of access will be requested. Type: string (or Expression with resultType string). :vartype scope: JSON + :ivar service_principal_credential_type: The service principal credential type to use in + Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' + for certificate. Type: string (or Expression with resultType string). + :vartype service_principal_credential_type: JSON + :ivar service_principal_embedded_cert: Specify the base64 encoded certificate of your + application registered in Azure Active Directory. Type: string (or Expression with resultType + string). + :vartype service_principal_embedded_cert: ~azure.mgmt.datafactory.models.SecretBase + :ivar service_principal_embedded_cert_password: Specify the password of your certificate if + your certificate has a password and you are using AadServicePrincipal authentication. Type: + string (or Expression with resultType string). 
+ :vartype service_principal_embedded_cert_password: ~azure.mgmt.datafactory.models.SecretBase """ _validation = { @@ -53975,6 +54881,7 @@ class RestServiceLinkedService(LinkedService): # pylint: disable=too-many-insta _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -54000,6 +54907,12 @@ class RestServiceLinkedService(LinkedService): # pylint: disable=too-many-insta "token_endpoint": {"key": "typeProperties.tokenEndpoint", "type": "object"}, "resource": {"key": "typeProperties.resource", "type": "object"}, "scope": {"key": "typeProperties.scope", "type": "object"}, + "service_principal_credential_type": {"key": "typeProperties.servicePrincipalCredentialType", "type": "object"}, + "service_principal_embedded_cert": {"key": "typeProperties.servicePrincipalEmbeddedCert", "type": "SecretBase"}, + "service_principal_embedded_cert_password": { + "key": "typeProperties.servicePrincipalEmbeddedCertPassword", + "type": "SecretBase", + }, } def __init__( # pylint: disable=too-many-locals @@ -54008,6 +54921,7 @@ def __init__( # pylint: disable=too-many-locals url: JSON, authentication_type: Union[str, "_models.RestServiceAuthenticationType"], additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -54028,12 +54942,17 @@ def __init__( # pylint: disable=too-many-locals token_endpoint: Optional[JSON] = None, resource: Optional[JSON] = None, scope: Optional[JSON] = None, + service_principal_credential_type: Optional[JSON] = None, + service_principal_embedded_cert: Optional["_models.SecretBase"] = None, + service_principal_embedded_cert_password: Optional["_models.SecretBase"] = None, **kwargs: Any ) -> None: """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -54098,9 +55017,22 @@ def __init__( # pylint: disable=too-many-locals :keyword scope: The scope of the access required. It describes what kind of access will be requested. Type: string (or Expression with resultType string). :paramtype scope: JSON + :keyword service_principal_credential_type: The service principal credential type to use in + Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' + for certificate. Type: string (or Expression with resultType string). + :paramtype service_principal_credential_type: JSON + :keyword service_principal_embedded_cert: Specify the base64 encoded certificate of your + application registered in Azure Active Directory. Type: string (or Expression with resultType + string). 
+ :paramtype service_principal_embedded_cert: ~azure.mgmt.datafactory.models.SecretBase + :keyword service_principal_embedded_cert_password: Specify the password of your certificate if + your certificate has a password and you are using AadServicePrincipal authentication. Type: + string (or Expression with resultType string). + :paramtype service_principal_embedded_cert_password: ~azure.mgmt.datafactory.models.SecretBase """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -54126,6 +55058,9 @@ def __init__( # pylint: disable=too-many-locals self.token_endpoint = token_endpoint self.resource = resource self.scope = scope + self.service_principal_credential_type = service_principal_credential_type + self.service_principal_embedded_cert = service_principal_embedded_cert + self.service_principal_embedded_cert_password = service_principal_embedded_cert_password class RestSink(CopySink): # pylint: disable=too-many-instance-attributes @@ -54523,7 +55458,7 @@ class RunQueryFilter(_serialization.Model): "ActivityType", "TriggerName", "TriggerRunTimestamp", "RunGroupId", and "LatestOnly". :vartype operand: str or ~azure.mgmt.datafactory.models.RunQueryFilterOperand :ivar operator: Operator to be used for filter. Required. Known values are: "Equals", - "NotEquals", "In", and "NotIn". + "NotEquals", "In", and "NotIn". :vartype operator: str or ~azure.mgmt.datafactory.models.RunQueryFilterOperator :ivar values: List of filter values. Required. :vartype values: list[str] @@ -54558,7 +55493,7 @@ def __init__( "ActivityType", "TriggerName", "TriggerRunTimestamp", "RunGroupId", and "LatestOnly". :paramtype operand: str or ~azure.mgmt.datafactory.models.RunQueryFilterOperand :keyword operator: Operator to be used for filter. Required. Known values are: "Equals", - "NotEquals", "In", and "NotIn". + "NotEquals", "In", and "NotIn". :paramtype operator: str or ~azure.mgmt.datafactory.models.RunQueryFilterOperator :keyword values: List of filter values. Required. :paramtype values: list[str] @@ -54628,6 +55563,8 @@ class SalesforceLinkedService(LinkedService): # pylint: disable=too-many-instan :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -54663,6 +55600,7 @@ class SalesforceLinkedService(LinkedService): # pylint: disable=too-many-instan _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -54679,6 +55617,7 @@ def __init__( self, *, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -54695,6 +55634,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection.
:paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -54724,6 +55665,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -54749,6 +55691,8 @@ class SalesforceMarketingCloudLinkedService(LinkedService): # pylint: disable=t :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -54789,6 +55733,7 @@ class SalesforceMarketingCloudLinkedService(LinkedService): # pylint: disable=t _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -54806,6 +55751,7 @@ def __init__( self, *, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -54823,6 +55769,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -54857,6 +55805,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -55191,6 +56140,8 @@ class SalesforceServiceCloudLinkedService(LinkedService): # pylint: disable=too :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. 
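Returning to the ``RestServiceLinkedService`` hunks a little earlier in this diff: besides the shared ``version`` field, they add certificate-based service principal authentication (``servicePrincipalCredentialType``, ``servicePrincipalEmbeddedCert``, ``servicePrincipalEmbeddedCertPassword``). A hedged sketch with placeholder values, assuming ``SecureString`` as the concrete ``SecretBase``; a real definition would normally also carry the existing service principal id and tenant properties, which are unchanged by this diff and omitted here:

    from azure.mgmt.datafactory import models as _models

    # Sketch only: REST linked service authenticating with a service principal
    # certificate. "AadServicePrincipal" and "ServicePrincipalCert" are the
    # values named in the docstrings added by this change; the certificate
    # contents and URL are placeholders.
    rest_ls = _models.RestServiceLinkedService(
        url="https://api.example.com/",
        authentication_type="AadServicePrincipal",
        service_principal_credential_type="ServicePrincipalCert",
        service_principal_embedded_cert=_models.SecureString(value="<base64-encoded-cert>"),
        service_principal_embedded_cert_password=_models.SecureString(value="<certificate-password>"),
    )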
@@ -55229,6 +56180,7 @@ class SalesforceServiceCloudLinkedService(LinkedService): # pylint: disable=too _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -55246,6 +56198,7 @@ def __init__( self, *, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -55263,6 +56216,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -55295,6 +56250,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -55654,6 +56610,8 @@ class SalesforceServiceCloudV2LinkedService(LinkedService): # pylint: disable=t :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -55690,6 +56648,7 @@ class SalesforceServiceCloudV2LinkedService(LinkedService): # pylint: disable=t _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -55706,6 +56665,7 @@ def __init__( self, *, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -55722,6 +56682,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -55752,6 +56714,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -56376,6 +57339,8 @@ class SalesforceV2LinkedService(LinkedService): # pylint: disable=too-many-inst :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. 
Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -56412,6 +57377,7 @@ class SalesforceV2LinkedService(LinkedService): # pylint: disable=too-many-inst _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -56428,6 +57394,7 @@ def __init__( self, *, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -56444,6 +57411,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -56474,6 +57443,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -56964,6 +57934,8 @@ class SapBWLinkedService(LinkedService): # pylint: disable=too-many-instance-at :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -57001,6 +57973,7 @@ class SapBWLinkedService(LinkedService): # pylint: disable=too-many-instance-at _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -57020,6 +57993,7 @@ def __init__( system_number: JSON, client_id: JSON, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -57033,6 +58007,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. 
@@ -57061,6 +58037,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -57176,7 +58153,7 @@ def __init__( self.query = query -class SapCloudForCustomerLinkedService(LinkedService): +class SapCloudForCustomerLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """Linked service for SAP Cloud for Customer. All required parameters must be populated in order to send to server. @@ -57186,6 +58163,8 @@ class SapCloudForCustomerLinkedService(LinkedService): :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -57217,6 +58196,7 @@ class SapCloudForCustomerLinkedService(LinkedService): _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -57232,6 +58212,7 @@ def __init__( *, url: JSON, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -57245,6 +58226,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -57269,6 +58252,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -57619,7 +58603,7 @@ def __init__( self.http_request_timeout = http_request_timeout -class SapEccLinkedService(LinkedService): +class SapEccLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """Linked service for SAP ERP Central Component(SAP ECC). All required parameters must be populated in order to send to server. @@ -57629,6 +58613,8 @@ class SapEccLinkedService(LinkedService): :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. 
@@ -57660,6 +58646,7 @@ class SapEccLinkedService(LinkedService): _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -57675,6 +58662,7 @@ def __init__( *, url: JSON, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -57688,6 +58676,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -57712,6 +58702,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -57955,6 +58946,8 @@ class SapHanaLinkedService(LinkedService): # pylint: disable=too-many-instance- :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -57989,6 +58982,7 @@ class SapHanaLinkedService(LinkedService): # pylint: disable=too-many-instance- _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -58005,6 +58999,7 @@ def __init__( self, *, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -58021,6 +59016,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -58049,6 +59046,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -58334,6 +59332,8 @@ class SapOdpLinkedService(LinkedService): # pylint: disable=too-many-instance-a :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. 
:vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -58406,6 +59406,7 @@ class SapOdpLinkedService(LinkedService): # pylint: disable=too-many-instance-a _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -58434,6 +59435,7 @@ def __init__( # pylint: disable=too-many-locals self, *, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -58462,6 +59464,8 @@ def __init__( # pylint: disable=too-many-locals :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -58528,6 +59532,7 @@ def __init__( # pylint: disable=too-many-locals """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -58809,6 +59814,8 @@ class SapOpenHubLinkedService(LinkedService): # pylint: disable=too-many-instan :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -58860,6 +59867,7 @@ class SapOpenHubLinkedService(LinkedService): # pylint: disable=too-many-instan _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -58881,6 +59889,7 @@ def __init__( self, *, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -58902,6 +59911,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. 
@@ -58948,6 +59959,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -59236,6 +60248,8 @@ class SapTableLinkedService(LinkedService): # pylint: disable=too-many-instance :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -59302,6 +60316,7 @@ class SapTableLinkedService(LinkedService): # pylint: disable=too-many-instance _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -59328,6 +60343,7 @@ def __init__( self, *, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -59354,6 +60370,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -59414,6 +60432,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -60708,6 +61727,8 @@ class ServiceNowLinkedService(LinkedService): # pylint: disable=too-many-instan :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. 
@@ -60757,6 +61778,7 @@ class ServiceNowLinkedService(LinkedService): # pylint: disable=too-many-instan _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -60779,6 +61801,7 @@ def __init__( endpoint: JSON, authentication_type: Union[str, "_models.ServiceNowAuthenticationType"], additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -60797,6 +61820,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -60838,6 +61863,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -61071,6 +62097,8 @@ class ServiceNowV2LinkedService(LinkedService): # pylint: disable=too-many-inst :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -61112,6 +62140,7 @@ class ServiceNowV2LinkedService(LinkedService): # pylint: disable=too-many-inst _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -61132,6 +62161,7 @@ def __init__( endpoint: JSON, authentication_type: Union[str, "_models.ServiceNowV2AuthenticationType"], additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -61148,6 +62178,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. 
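The ServiceNow models gain the same keyword while their required parameters stay unchanged. A sketch of building ServiceNowV2LinkedService with basic authentication, assuming "Basic" is one of the ServiceNowV2AuthenticationType values and that SecureString is available as the SecretBase implementation; all values are placeholders:

from azure.mgmt.datafactory import models

ls = models.ServiceNowV2LinkedService(
    endpoint="https://myinstance.service-now.com",  # Type: JSON (string or Expression)
    authentication_type="Basic",                    # accepts the enum or its string value
    user_name="integration.user",
    password=models.SecureString(value="<secret>"),
    version="1.0",                                  # new optional keyword
)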
@@ -61181,6 +62213,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -61800,6 +62833,8 @@ class SftpServerLinkedService(LinkedService): # pylint: disable=too-many-instan :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -61854,6 +62889,7 @@ class SftpServerLinkedService(LinkedService): # pylint: disable=too-many-instan _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -61876,6 +62912,7 @@ def __init__( *, host: JSON, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -61896,6 +62933,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -61943,6 +62982,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -62066,6 +63106,8 @@ class SharePointOnlineListLinkedService(LinkedService): # pylint: disable=too-m :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -62087,8 +63129,20 @@ class SharePointOnlineListLinkedService(LinkedService): # pylint: disable=too-m string (or Expression with resultType string). Required. :vartype service_principal_id: JSON :ivar service_principal_key: The client secret of your application registered in Azure Active - Directory. Type: string (or Expression with resultType string). Required. + Directory. Type: string (or Expression with resultType string). :vartype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :ivar service_principal_credential_type: The service principal credential type to use in + Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' + for certificate. Type: string (or Expression with resultType string). + :vartype service_principal_credential_type: JSON + :ivar service_principal_embedded_cert: Specify the base64 encoded certificate of your + application registered in Azure Active Directory. 
Type: string (or Expression with resultType + string). + :vartype service_principal_embedded_cert: ~azure.mgmt.datafactory.models.SecretBase + :ivar service_principal_embedded_cert_password: Specify the password of your certificate if + your certificate has a password and you are using AadServicePrincipal authentication. Type: + string (or Expression with resultType string). + :vartype service_principal_embedded_cert_password: ~azure.mgmt.datafactory.models.SecretBase :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. :vartype encrypted_credential: str @@ -62099,12 +63153,12 @@ class SharePointOnlineListLinkedService(LinkedService): # pylint: disable=too-m "site_url": {"required": True}, "tenant_id": {"required": True}, "service_principal_id": {"required": True}, - "service_principal_key": {"required": True}, } _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -62113,6 +63167,12 @@ class SharePointOnlineListLinkedService(LinkedService): # pylint: disable=too-m "tenant_id": {"key": "typeProperties.tenantId", "type": "object"}, "service_principal_id": {"key": "typeProperties.servicePrincipalId", "type": "object"}, "service_principal_key": {"key": "typeProperties.servicePrincipalKey", "type": "SecretBase"}, + "service_principal_credential_type": {"key": "typeProperties.servicePrincipalCredentialType", "type": "object"}, + "service_principal_embedded_cert": {"key": "typeProperties.servicePrincipalEmbeddedCert", "type": "SecretBase"}, + "service_principal_embedded_cert_password": { + "key": "typeProperties.servicePrincipalEmbeddedCertPassword", + "type": "SecretBase", + }, "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } @@ -62122,12 +63182,16 @@ def __init__( site_url: JSON, tenant_id: JSON, service_principal_id: JSON, - service_principal_key: "_models.SecretBase", additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, annotations: Optional[List[JSON]] = None, + service_principal_key: Optional["_models.SecretBase"] = None, + service_principal_credential_type: Optional[JSON] = None, + service_principal_embedded_cert: Optional["_models.SecretBase"] = None, + service_principal_embedded_cert_password: Optional["_models.SecretBase"] = None, encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: @@ -62135,6 +63199,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -62156,14 +63222,27 @@ def __init__( Type: string (or Expression with resultType string). Required. 
:paramtype service_principal_id: JSON :keyword service_principal_key: The client secret of your application registered in Azure - Active Directory. Type: string (or Expression with resultType string). Required. + Active Directory. Type: string (or Expression with resultType string). :paramtype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :keyword service_principal_credential_type: The service principal credential type to use in + Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' + for certificate. Type: string (or Expression with resultType string). + :paramtype service_principal_credential_type: JSON + :keyword service_principal_embedded_cert: Specify the base64 encoded certificate of your + application registered in Azure Active Directory. Type: string (or Expression with resultType + string). + :paramtype service_principal_embedded_cert: ~azure.mgmt.datafactory.models.SecretBase + :keyword service_principal_embedded_cert_password: Specify the password of your certificate if + your certificate has a password and you are using AadServicePrincipal authentication. Type: + string (or Expression with resultType string). + :paramtype service_principal_embedded_cert_password: ~azure.mgmt.datafactory.models.SecretBase :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -62175,6 +63254,9 @@ def __init__( self.tenant_id = tenant_id self.service_principal_id = service_principal_id self.service_principal_key = service_principal_key + self.service_principal_credential_type = service_principal_credential_type + self.service_principal_embedded_cert = service_principal_embedded_cert + self.service_principal_embedded_cert_password = service_principal_embedded_cert_password self.encrypted_credential = encrypted_credential @@ -62387,6 +63469,8 @@ class ShopifyLinkedService(LinkedService): # pylint: disable=too-many-instance- :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -62423,6 +63507,7 @@ class ShopifyLinkedService(LinkedService): # pylint: disable=too-many-instance- _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -62440,6 +63525,7 @@ def __init__( *, host: JSON, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -62455,6 +63541,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. 
:paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -62484,6 +63572,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -62745,6 +63834,8 @@ class SmartsheetLinkedService(LinkedService): :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -62768,6 +63859,7 @@ class SmartsheetLinkedService(LinkedService): _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -62781,6 +63873,7 @@ def __init__( *, api_token: "_models.SecretBase", additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -62792,6 +63885,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -62808,6 +63903,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -62952,6 +64048,9 @@ class SnowflakeExportCopyCommand(ExportSettings): object). Example: "additionalFormatOptions": { "OVERWRITE": "TRUE", "MAX_FILE_SIZE": "'FALSE'" }. :vartype additional_format_options: dict[str, JSON] + :ivar storage_integration: The name of the snowflake storage integration to use for the copy + operation. Type: string (or Expression with resultType string). + :vartype storage_integration: JSON """ _validation = { @@ -62963,6 +64062,7 @@ class SnowflakeExportCopyCommand(ExportSettings): "type": {"key": "type", "type": "str"}, "additional_copy_options": {"key": "additionalCopyOptions", "type": "{object}"}, "additional_format_options": {"key": "additionalFormatOptions", "type": "{object}"}, + "storage_integration": {"key": "storageIntegration", "type": "object"}, } def __init__( @@ -62971,6 +64071,7 @@ def __init__( additional_properties: Optional[Dict[str, JSON]] = None, additional_copy_options: Optional[Dict[str, JSON]] = None, additional_format_options: Optional[Dict[str, JSON]] = None, + storage_integration: Optional[JSON] = None, **kwargs: Any ) -> None: """ @@ -62987,11 +64088,15 @@ def __init__( object). 
Example: "additionalFormatOptions": { "OVERWRITE": "TRUE", "MAX_FILE_SIZE": "'FALSE'" }. :paramtype additional_format_options: dict[str, JSON] + :keyword storage_integration: The name of the snowflake storage integration to use for the copy + operation. Type: string (or Expression with resultType string). + :paramtype storage_integration: JSON """ super().__init__(additional_properties=additional_properties, **kwargs) self.type: str = "SnowflakeExportCopyCommand" self.additional_copy_options = additional_copy_options self.additional_format_options = additional_format_options + self.storage_integration = storage_integration class SnowflakeImportCopyCommand(ImportSettings): @@ -63014,6 +64119,9 @@ class SnowflakeImportCopyCommand(ImportSettings): object). Example: "additionalFormatOptions": { "FORCE": "TRUE", "LOAD_UNCERTAIN_FILES": "'FALSE'" }. :vartype additional_format_options: dict[str, JSON] + :ivar storage_integration: The name of the snowflake storage integration to use for the copy + operation. Type: string (or Expression with resultType string). + :vartype storage_integration: JSON """ _validation = { @@ -63025,6 +64133,7 @@ class SnowflakeImportCopyCommand(ImportSettings): "type": {"key": "type", "type": "str"}, "additional_copy_options": {"key": "additionalCopyOptions", "type": "{object}"}, "additional_format_options": {"key": "additionalFormatOptions", "type": "{object}"}, + "storage_integration": {"key": "storageIntegration", "type": "object"}, } def __init__( @@ -63033,6 +64142,7 @@ def __init__( additional_properties: Optional[Dict[str, JSON]] = None, additional_copy_options: Optional[Dict[str, JSON]] = None, additional_format_options: Optional[Dict[str, JSON]] = None, + storage_integration: Optional[JSON] = None, **kwargs: Any ) -> None: """ @@ -63049,11 +64159,15 @@ def __init__( object). Example: "additionalFormatOptions": { "FORCE": "TRUE", "LOAD_UNCERTAIN_FILES": "'FALSE'" }. :paramtype additional_format_options: dict[str, JSON] + :keyword storage_integration: The name of the snowflake storage integration to use for the copy + operation. Type: string (or Expression with resultType string). + :paramtype storage_integration: JSON """ super().__init__(additional_properties=additional_properties, **kwargs) self.type: str = "SnowflakeImportCopyCommand" self.additional_copy_options = additional_copy_options self.additional_format_options = additional_format_options + self.storage_integration = storage_integration class SnowflakeLinkedService(LinkedService): @@ -63066,6 +64180,8 @@ class SnowflakeLinkedService(LinkedService): :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. 
@@ -63092,6 +64208,7 @@ class SnowflakeLinkedService(LinkedService): _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -63106,6 +64223,7 @@ def __init__( *, connection_string: JSON, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -63118,6 +64236,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -63137,6 +64257,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -63471,6 +64592,8 @@ class SnowflakeV2LinkedService(LinkedService): # pylint: disable=too-many-insta :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -63525,6 +64648,7 @@ class SnowflakeV2LinkedService(LinkedService): # pylint: disable=too-many-insta _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -63551,6 +64675,7 @@ def __init__( database: JSON, warehouse: JSON, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -63571,6 +64696,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. 
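Both Snowflake copy commands now accept storage_integration. A sketch of the import side attached to a Snowflake sink, assuming SnowflakeSink exposes import_settings as elsewhere in this models module; the integration name and copy options are illustrative:

from azure.mgmt.datafactory import models

import_settings = models.SnowflakeImportCopyCommand(
    storage_integration="MY_AZURE_STORAGE_INTEGRATION",  # name of a Snowflake storage integration
    additional_copy_options={"ON_ERROR": "SKIP_FILE"},
)
sink = models.SnowflakeSink(import_settings=import_settings)
# SnowflakeExportCopyCommand takes the same keyword for the export/source direction.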
@@ -63617,6 +64744,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -63885,6 +65013,8 @@ class SparkLinkedService(LinkedService): # pylint: disable=too-many-instance-at :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -63946,6 +65076,7 @@ class SparkLinkedService(LinkedService): # pylint: disable=too-many-instance-at _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -63973,6 +65104,7 @@ def __init__( port: JSON, authentication_type: Union[str, "_models.SparkAuthenticationType"], additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -63994,6 +65126,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -64047,6 +65181,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -65107,6 +66242,8 @@ class SqlServerLinkedService(LinkedService): # pylint: disable=too-many-instanc :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -65198,7 +66335,7 @@ class SqlServerLinkedService(LinkedService): # pylint: disable=too-many-instanc AzureKeyVaultSecretReference. :vartype connection_string: JSON :ivar authentication_type: The type used for authentication. Type: string. Known values are: - "SQL" and "Windows". + "SQL", "Windows", and "UserAssignedManagedIdentity". :vartype authentication_type: str or ~azure.mgmt.datafactory.models.SqlServerAuthenticationType :ivar user_name: The on-premises Windows authentication user name. Type: string (or Expression with resultType string). @@ -65210,6 +66347,8 @@ class SqlServerLinkedService(LinkedService): # pylint: disable=too-many-instanc :vartype encrypted_credential: str :ivar always_encrypted_settings: Sql always encrypted properties. 
:vartype always_encrypted_settings: ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedProperties + :ivar credential: The credential reference containing authentication information. + :vartype credential: ~azure.mgmt.datafactory.models.CredentialReference """ _validation = { @@ -65219,6 +66358,7 @@ class SqlServerLinkedService(LinkedService): # pylint: disable=too-many-instanc _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -65251,12 +66391,14 @@ class SqlServerLinkedService(LinkedService): # pylint: disable=too-many-instanc "key": "typeProperties.alwaysEncryptedSettings", "type": "SqlAlwaysEncryptedProperties", }, + "credential": {"key": "typeProperties.credential", "type": "CredentialReference"}, } def __init__( # pylint: disable=too-many-locals self, *, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -65286,12 +66428,15 @@ def __init__( # pylint: disable=too-many-locals password: Optional["_models.SecretBase"] = None, encrypted_credential: Optional[str] = None, always_encrypted_settings: Optional["_models.SqlAlwaysEncryptedProperties"] = None, + credential: Optional["_models.CredentialReference"] = None, **kwargs: Any ) -> None: """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -65383,7 +66528,7 @@ def __init__( # pylint: disable=too-many-locals AzureKeyVaultSecretReference. :paramtype connection_string: JSON :keyword authentication_type: The type used for authentication. Type: string. Known values are: - "SQL" and "Windows". + "SQL", "Windows", and "UserAssignedManagedIdentity". :paramtype authentication_type: str or ~azure.mgmt.datafactory.models.SqlServerAuthenticationType :keyword user_name: The on-premises Windows authentication user name. Type: string (or @@ -65397,9 +66542,12 @@ def __init__( # pylint: disable=too-many-locals :keyword always_encrypted_settings: Sql always encrypted properties. :paramtype always_encrypted_settings: ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedProperties + :keyword credential: The credential reference containing authentication information. 
+ :paramtype credential: ~azure.mgmt.datafactory.models.CredentialReference """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -65432,6 +66580,7 @@ def __init__( # pylint: disable=too-many-locals self.password = password self.encrypted_credential = encrypted_credential self.always_encrypted_settings = always_encrypted_settings + self.credential = credential class SqlServerLinkedServiceTypeProperties( @@ -65522,7 +66671,7 @@ class SqlServerLinkedServiceTypeProperties( AzureKeyVaultSecretReference. :vartype connection_string: JSON :ivar authentication_type: The type used for authentication. Type: string. Known values are: - "SQL" and "Windows". + "SQL", "Windows", and "UserAssignedManagedIdentity". :vartype authentication_type: str or ~azure.mgmt.datafactory.models.SqlServerAuthenticationType :ivar user_name: The on-premises Windows authentication user name. Type: string (or Expression with resultType string). @@ -65534,6 +66683,8 @@ class SqlServerLinkedServiceTypeProperties( :vartype encrypted_credential: str :ivar always_encrypted_settings: Sql always encrypted properties. :vartype always_encrypted_settings: ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedProperties + :ivar credential: The credential reference containing authentication information. + :vartype credential: ~azure.mgmt.datafactory.models.CredentialReference """ _attribute_map = { @@ -65562,6 +66713,7 @@ class SqlServerLinkedServiceTypeProperties( "password": {"key": "password", "type": "SecretBase"}, "encrypted_credential": {"key": "encryptedCredential", "type": "str"}, "always_encrypted_settings": {"key": "alwaysEncryptedSettings", "type": "SqlAlwaysEncryptedProperties"}, + "credential": {"key": "credential", "type": "CredentialReference"}, } def __init__( # pylint: disable=too-many-locals @@ -65592,6 +66744,7 @@ def __init__( # pylint: disable=too-many-locals password: Optional["_models.SecretBase"] = None, encrypted_credential: Optional[str] = None, always_encrypted_settings: Optional["_models.SqlAlwaysEncryptedProperties"] = None, + credential: Optional["_models.CredentialReference"] = None, **kwargs: Any ) -> None: """ @@ -65678,7 +66831,7 @@ def __init__( # pylint: disable=too-many-locals AzureKeyVaultSecretReference. :paramtype connection_string: JSON :keyword authentication_type: The type used for authentication. Type: string. Known values are: - "SQL" and "Windows". + "SQL", "Windows", and "UserAssignedManagedIdentity". :paramtype authentication_type: str or ~azure.mgmt.datafactory.models.SqlServerAuthenticationType :keyword user_name: The on-premises Windows authentication user name. Type: string (or @@ -65692,6 +66845,8 @@ def __init__( # pylint: disable=too-many-locals :keyword always_encrypted_settings: Sql always encrypted properties. :paramtype always_encrypted_settings: ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedProperties + :keyword credential: The credential reference containing authentication information. 
+ :paramtype credential: ~azure.mgmt.datafactory.models.CredentialReference """ super().__init__( server=server, @@ -65721,6 +66876,7 @@ def __init__( # pylint: disable=too-many-locals self.password = password self.encrypted_credential = encrypted_credential self.always_encrypted_settings = always_encrypted_settings + self.credential = credential class SqlServerSink(CopySink): # pylint: disable=too-many-instance-attributes @@ -66674,6 +67830,8 @@ class SquareLinkedService(LinkedService): # pylint: disable=too-many-instance-a :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -66716,6 +67874,7 @@ class SquareLinkedService(LinkedService): # pylint: disable=too-many-instance-a _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -66735,6 +67894,7 @@ def __init__( self, *, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -66754,6 +67914,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -66790,6 +67952,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -68056,7 +69219,7 @@ class StoredProcedureParameter(_serialization.Model): string). :vartype value: JSON :ivar type: Stored procedure parameter type. Known values are: "String", "Int", "Int64", - "Decimal", "Guid", "Boolean", and "Date". + "Decimal", "Guid", "Boolean", "Date", and "Int". :vartype type: str or ~azure.mgmt.datafactory.models.StoredProcedureParameterType """ @@ -68077,7 +69240,7 @@ def __init__( string). :paramtype value: JSON :keyword type: Stored procedure parameter type. Known values are: "String", "Int", "Int64", - "Decimal", "Guid", "Boolean", and "Date". + "Decimal", "Guid", "Boolean", "Date", and "Int". :paramtype type: str or ~azure.mgmt.datafactory.models.StoredProcedureParameterType """ super().__init__(**kwargs) @@ -68242,6 +69405,8 @@ class SybaseLinkedService(LinkedService): # pylint: disable=too-many-instance-a :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. 
:vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -68280,6 +69445,7 @@ class SybaseLinkedService(LinkedService): # pylint: disable=too-many-instance-a _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -68299,6 +69465,7 @@ def __init__( server: JSON, database: JSON, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -68314,6 +69481,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -68345,6 +69514,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -69310,6 +70480,8 @@ class TeamDeskLinkedService(LinkedService): # pylint: disable=too-many-instance :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -69345,6 +70517,7 @@ class TeamDeskLinkedService(LinkedService): # pylint: disable=too-many-instance _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -69363,6 +70536,7 @@ def __init__( authentication_type: Union[str, "_models.TeamDeskAuthenticationType"], url: JSON, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -69377,6 +70551,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. 
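SqlServerLinkedService (and its shared type-properties class) now documents the "UserAssignedManagedIdentity" authentication type and carries a credential reference. A sketch, assuming the server/database keywords this model already exposes and that CredentialReference takes type and reference_name; the names are placeholders:

from azure.mgmt.datafactory import models

ls = models.SqlServerLinkedService(
    server="sql01.contoso.com",
    database="SalesDb",
    authentication_type="UserAssignedManagedIdentity",
    credential=models.CredentialReference(
        type="CredentialReference",
        reference_name="my-user-assigned-identity-credential",
    ),
)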
@@ -69405,6 +70581,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -69430,6 +70607,8 @@ class TeradataLinkedService(LinkedService): # pylint: disable=too-many-instance :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -69463,6 +70642,7 @@ class TeradataLinkedService(LinkedService): # pylint: disable=too-many-instance _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -69479,6 +70659,7 @@ def __init__( self, *, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -69495,6 +70676,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -69524,6 +70707,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -70553,6 +71737,8 @@ class TwilioLinkedService(LinkedService): :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -70577,6 +71763,7 @@ class TwilioLinkedService(LinkedService): _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -70591,6 +71778,7 @@ def __init__( user_name: JSON, password: "_models.SecretBase", additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -70601,6 +71789,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. 
:paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -70617,6 +71807,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -71151,7 +72342,7 @@ def __init__( self.default_value = default_value -class VerticaLinkedService(LinkedService): +class VerticaLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """Vertica linked service. All required parameters must be populated in order to send to server. @@ -71161,6 +72352,8 @@ class VerticaLinkedService(LinkedService): :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -71172,6 +72365,14 @@ class VerticaLinkedService(LinkedService): :ivar connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. :vartype connection_string: JSON + :ivar server: Server name for connection. Type: string. + :vartype server: JSON + :ivar port: The port for the connection. Type: integer. + :vartype port: JSON + :ivar uid: Username for authentication. Type: string. + :vartype uid: JSON + :ivar database: Database name for connection. Type: string. + :vartype database: JSON :ivar pwd: The Azure key vault secret reference of password in connection string. :vartype pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :ivar encrypted_credential: The encrypted credential used for authentication. 
Credentials are @@ -71186,11 +72387,16 @@ class VerticaLinkedService(LinkedService): _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, "annotations": {"key": "annotations", "type": "[object]"}, "connection_string": {"key": "typeProperties.connectionString", "type": "object"}, + "server": {"key": "typeProperties.server", "type": "object"}, + "port": {"key": "typeProperties.port", "type": "object"}, + "uid": {"key": "typeProperties.uid", "type": "object"}, + "database": {"key": "typeProperties.database", "type": "object"}, "pwd": {"key": "typeProperties.pwd", "type": "AzureKeyVaultSecretReference"}, "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } @@ -71199,11 +72405,16 @@ def __init__( self, *, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, annotations: Optional[List[JSON]] = None, connection_string: Optional[JSON] = None, + server: Optional[JSON] = None, + port: Optional[JSON] = None, + uid: Optional[JSON] = None, + database: Optional[JSON] = None, pwd: Optional["_models.AzureKeyVaultSecretReference"] = None, encrypted_credential: Optional[str] = None, **kwargs: Any @@ -71212,6 +72423,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -71223,6 +72436,14 @@ def __init__( :keyword connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. :paramtype connection_string: JSON + :keyword server: Server name for connection. Type: string. + :paramtype server: JSON + :keyword port: The port for the connection. Type: integer. + :paramtype port: JSON + :keyword uid: Username for authentication. Type: string. + :paramtype uid: JSON + :keyword database: Database name for connection. Type: string. + :paramtype database: JSON :keyword pwd: The Azure key vault secret reference of password in connection string. :paramtype pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :keyword encrypted_credential: The encrypted credential used for authentication. 
Credentials @@ -71231,6 +72452,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -71239,6 +72461,10 @@ def __init__( ) self.type: str = "Vertica" self.connection_string = connection_string + self.server = server + self.port = port + self.uid = uid + self.database = database self.pwd = pwd self.encrypted_credential = encrypted_credential @@ -71575,6 +72801,8 @@ class WarehouseLinkedService(LinkedService): # pylint: disable=too-many-instanc :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -71625,6 +72853,7 @@ class WarehouseLinkedService(LinkedService): # pylint: disable=too-many-instanc _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -71646,6 +72875,7 @@ def __init__( artifact_id: JSON, endpoint: JSON, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -71663,6 +72893,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -71705,6 +72937,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -72755,6 +73988,8 @@ class WebLinkedService(LinkedService): :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. 
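VerticaLinkedService can now be described with discrete connection properties instead of only an ODBC connection string. A sketch using the new keywords; the host, port, and names are placeholders:

from azure.mgmt.datafactory import models

ls = models.VerticaLinkedService(
    server="vertica.contoso.com",
    port=5433,
    database="analytics",
    uid="loader",
    version="1.0",
)
# connection_string and pwd remain available for the ODBC-string style of configuration.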
@@ -72775,6 +74010,7 @@ class WebLinkedService(LinkedService): _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -72787,6 +74023,7 @@ def __init__( *, type_properties: "_models.WebLinkedServiceTypeProperties", additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -72797,6 +74034,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -72810,6 +74049,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -73097,6 +74337,8 @@ class XeroLinkedService(LinkedService): # pylint: disable=too-many-instance-att :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -73138,6 +74380,7 @@ class XeroLinkedService(LinkedService): # pylint: disable=too-many-instance-att _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -73156,6 +74399,7 @@ def __init__( self, *, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -73174,6 +74418,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. 
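WebLinkedService only gains the version keyword; its polymorphic type_properties contract is unchanged. A sketch, assuming WebAnonymousAuthentication as the simplest concrete WebLinkedServiceTypeProperties; the URL is illustrative:

from azure.mgmt.datafactory import models

ls = models.WebLinkedService(
    type_properties=models.WebAnonymousAuthentication(url="https://example.org/feed"),
    version="1.0",
)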
@@ -73210,6 +74456,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -73754,6 +75001,8 @@ class ZendeskLinkedService(LinkedService): # pylint: disable=too-many-instance- :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. @@ -73789,6 +75038,7 @@ class ZendeskLinkedService(LinkedService): # pylint: disable=too-many-instance- _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -73807,6 +75057,7 @@ def __init__( authentication_type: Union[str, "_models.ZendeskAuthenticationType"], url: JSON, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -73821,6 +75072,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -73848,6 +75101,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, @@ -73918,6 +75172,8 @@ class ZohoLinkedService(LinkedService): # pylint: disable=too-many-instance-att :vartype additional_properties: dict[str, JSON] :ivar type: Type of linked service. Required. :vartype type: str + :ivar version: Version of the linked service. + :vartype version: str :ivar connect_via: The integration runtime reference. :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :ivar description: Linked service description. 
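End to end, these model changes surface through the existing linked_services operations unchanged. A sketch of pushing one of the updated models to a factory, assuming the public client package and azure-identity; the subscription, resource group, factory, and secret values are placeholders, and "Token" is assumed to be a valid ZendeskAuthenticationType value:

from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient, models

client = DataFactoryManagementClient(DefaultAzureCredential(), "<subscription-id>")
resource = models.LinkedServiceResource(
    properties=models.ZendeskLinkedService(
        url="https://contoso.zendesk.com",
        authentication_type="Token",
        api_token=models.SecureString(value="<api-token>"),
        version="1.0",
    )
)
client.linked_services.create_or_update("my-rg", "my-factory", "ZendeskLS", resource)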
@@ -73955,6 +75211,7 @@ class ZohoLinkedService(LinkedService): # pylint: disable=too-many-instance-att _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, + "version": {"key": "version", "type": "str"}, "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, @@ -73972,6 +75229,7 @@ def __init__( self, *, additional_properties: Optional[Dict[str, JSON]] = None, + version: Optional[str] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, @@ -73989,6 +75247,8 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] + :keyword version: Version of the linked service. + :paramtype version: str :keyword connect_via: The integration runtime reference. :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :keyword description: Linked service description. @@ -74020,6 +75280,7 @@ def __init__( """ super().__init__( additional_properties=additional_properties, + version=version, connect_via=connect_via, description=description, parameters=parameters, diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_activity_runs_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_activity_runs_operations.py index d3afb059e48..12464e49f48 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_activity_runs_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_activity_runs_operations.py @@ -19,15 +19,13 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -220,7 +218,6 @@ def query_by_pipeline_run( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -234,7 +231,7 @@ def query_by_pipeline_run( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("ActivityRunsQueryResponse", pipeline_response) + deserialized = self._deserialize("ActivityRunsQueryResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_change_data_capture_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_change_data_capture_operations.py index eea39471fce..e7ffb77afaa 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_change_data_capture_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_change_data_capture_operations.py @@ -21,15 +21,13 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -448,7 +446,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -464,7 +461,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -627,7 +623,6 @@ def create_or_update( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -641,7 +636,7 @@ def create_or_update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("ChangeDataCaptureResource", pipeline_response) + deserialized = self._deserialize("ChangeDataCaptureResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -697,7 +692,6 @@ def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -711,7 +705,7 @@ def get( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("ChangeDataCaptureResource", pipeline_response) + deserialized = 
self._deserialize("ChangeDataCaptureResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -757,7 +751,6 @@ def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -813,7 +806,6 @@ def start( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -869,7 +861,6 @@ def stop( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -923,7 +914,6 @@ def status(self, resource_group_name: str, factory_name: str, change_data_captur headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -937,7 +927,7 @@ def status(self, resource_group_name: str, factory_name: str, change_data_captur map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("str", pipeline_response) + deserialized = self._deserialize("str", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_credential_operations_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_credential_operations_operations.py index 5ec7d70c2a1..adbba32f483 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_credential_operations_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_credential_operations_operations.py @@ -21,15 +21,13 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -303,7 +301,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -319,7 +316,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -478,7 +474,6 @@ def create_or_update( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -492,7 +487,7 @@ def create_or_update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("CredentialResource", pipeline_response) + deserialized = self._deserialize("CredentialResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -548,7 +543,6 @@ def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -564,7 +558,7 @@ def get( deserialized = None if response.status_code == 200: - deserialized = self._deserialize("CredentialResource", pipeline_response) + deserialized = self._deserialize("CredentialResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -610,7 +604,6 @@ def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flow_debug_session_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flow_debug_session_operations.py index 879a98f2b5c..84a649ab4a6 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flow_debug_session_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flow_debug_session_operations.py @@ -8,7 +8,7 @@ # -------------------------------------------------------------------------- from io import IOBase import sys -from typing import Any, Callable, Dict, IO, Iterable, Optional, Type, TypeVar, Union, cast, overload +from typing import Any, Callable, Dict, IO, Iterable, Iterator, Optional, Type, TypeVar, Union, cast, overload import urllib.parse from azure.core.exceptions import ( @@ -17,13 +17,14 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat @@ -31,7 +32,6 
@@ from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -281,7 +281,7 @@ def _create_initial( factory_name: str, request: Union[_models.CreateDataFlowDebugSessionRequest, IO[bytes]], **kwargs: Any - ) -> Optional[_models.CreateDataFlowDebugSessionResponse]: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -295,7 +295,7 @@ def _create_initial( api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.CreateDataFlowDebugSessionResponse]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -316,10 +316,10 @@ def _create_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -327,17 +327,19 @@ def _create_initial( response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("CreateDataFlowDebugSessionResponse", pipeline_response) - if response.status_code == 202: response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -443,10 +445,11 @@ def begin_create( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("CreateDataFlowDebugSessionResponse", pipeline_response) + deserialized = self._deserialize("CreateDataFlowDebugSessionResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -508,7 +511,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -524,7 +526,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -660,7 +661,6 @@ def add_data_flow( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -674,7 +674,7 @@ def add_data_flow( map_error(status_code=response.status_code, response=response, 
error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("AddDataFlowToDebugSessionResponse", pipeline_response) + deserialized = self._deserialize("AddDataFlowToDebugSessionResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -788,7 +788,6 @@ def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -811,7 +810,7 @@ def _execute_command_initial( factory_name: str, request: Union[_models.DataFlowDebugCommandRequest, IO[bytes]], **kwargs: Any - ) -> Optional[_models.DataFlowDebugCommandResponse]: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -825,7 +824,7 @@ def _execute_command_initial( api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.DataFlowDebugCommandResponse]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -846,10 +845,10 @@ def _execute_command_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -857,17 +856,19 @@ def _execute_command_initial( response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize("DataFlowDebugCommandResponse", pipeline_response) - if response.status_code == 202: response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -973,10 +974,11 @@ def begin_execute_command( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("DataFlowDebugCommandResponse", pipeline_response) + deserialized = self._deserialize("DataFlowDebugCommandResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flows_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flows_operations.py index 7203524250b..062bf072336 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flows_operations.py +++ 
b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flows_operations.py @@ -21,15 +21,13 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -394,7 +392,6 @@ def create_or_update( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -408,7 +405,7 @@ def create_or_update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("DataFlowResource", pipeline_response) + deserialized = self._deserialize("DataFlowResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -464,7 +461,6 @@ def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -478,7 +474,7 @@ def get( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("DataFlowResource", pipeline_response) + deserialized = self._deserialize("DataFlowResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -524,7 +520,6 @@ def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -580,7 +575,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -596,7 +590,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_datasets_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_datasets_operations.py index e66e71189f3..62880ce2cad 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_datasets_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_datasets_operations.py @@ -21,15 +21,13 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -303,7 +301,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -319,7 +316,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -478,7 +474,6 @@ def create_or_update( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -492,7 +487,7 @@ def create_or_update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("DatasetResource", pipeline_response) + deserialized = self._deserialize("DatasetResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -548,7 +543,6 @@ def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -564,7 +558,7 @@ def get( deserialized = None if response.status_code == 200: - deserialized = self._deserialize("DatasetResource", pipeline_response) + deserialized = self._deserialize("DatasetResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -610,7 +604,6 @@ def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_exposure_control_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_exposure_control_operations.py index 7eee946d592..94d33618346 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_exposure_control_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_exposure_control_operations.py @@ -19,15 +19,13 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -274,7 +272,6 @@ def get_feature_value( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -288,7 +285,7 @@ def get_feature_value( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("ExposureControlResponse", pipeline_response) + deserialized = self._deserialize("ExposureControlResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -403,7 +400,6 @@ def get_feature_value_by_factory( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -417,7 +413,7 @@ def get_feature_value_by_factory( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("ExposureControlResponse", pipeline_response) + deserialized = self._deserialize("ExposureControlResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -535,7 +531,6 @@ def query_feature_values_by_factory( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -549,7 +544,7 @@ def query_feature_values_by_factory( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("ExposureControlBatchResponse", pipeline_response) + deserialized = self._deserialize("ExposureControlBatchResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_factories_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_factories_operations.py index b3e6a1adeab..8854b1aced5 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_factories_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_factories_operations.py @@ -21,15 +21,13 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -438,7 +436,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -454,7 +451,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -572,7 +568,6 @@ def configure_factory_repo( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -586,7 +581,7 @@ def configure_factory_repo( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("Factory", pipeline_response) + deserialized = self._deserialize("Factory", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -627,7 +622,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -643,7 +637,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -792,7 +785,6 @@ def create_or_update( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -806,7 +798,7 @@ def create_or_update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("Factory", pipeline_response) + deserialized = self._deserialize("Factory", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -921,7 +913,6 @@ def update( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -935,7 +926,7 @@ def update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("Factory", pipeline_response) + deserialized = self._deserialize("Factory", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -983,7 +974,6 @@ def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -999,7 +989,7 @@ def get( deserialized = None if response.status_code == 200: - deserialized = self._deserialize("Factory", pipeline_response) + deserialized = self._deserialize("Factory", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1042,7 +1032,6 @@ def delete( # pylint: disable=inconsistent-return-statements headers=_headers, 
params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1167,7 +1156,6 @@ def get_git_hub_access_token( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1181,7 +1169,7 @@ def get_git_hub_access_token( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("GitHubAccessTokenResponse", pipeline_response) + deserialized = self._deserialize("GitHubAccessTokenResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1295,7 +1283,6 @@ def get_data_plane_access( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1309,7 +1296,7 @@ def get_data_plane_access( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("AccessPolicyResponse", pipeline_response) + deserialized = self._deserialize("AccessPolicyResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_global_parameters_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_global_parameters_operations.py index b2eaa48001d..e68dbb25f6f 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_global_parameters_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_global_parameters_operations.py @@ -21,15 +21,13 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -288,7 +286,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -304,7 +301,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -372,7 +368,6 @@ def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -386,7 +381,7 @@ def get( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("GlobalParameterResource", pipeline_response) + deserialized = self._deserialize("GlobalParameterResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -510,7 +505,6 @@ def create_or_update( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -524,7 +518,7 @@ def create_or_update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("GlobalParameterResource", pipeline_response) + deserialized = self._deserialize("GlobalParameterResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -570,7 +564,6 @@ def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_nodes_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_nodes_operations.py index 6514255e5e8..a925184863f 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_nodes_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_nodes_operations.py @@ -19,15 +19,13 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -328,7 +326,6 @@ def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -342,7 +339,7 @@ def get( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("SelfHostedIntegrationRuntimeNode", pipeline_response) + deserialized = self._deserialize("SelfHostedIntegrationRuntimeNode", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -391,7 +388,6 @@ def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -540,7 +536,6 @@ def update( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -554,7 +549,7 @@ def update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("SelfHostedIntegrationRuntimeNode", pipeline_response) + deserialized = self._deserialize("SelfHostedIntegrationRuntimeNode", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -603,7 +598,6 @@ def get_ip_address( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -617,7 +611,7 @@ def get_ip_address( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("IntegrationRuntimeNodeIpAddress", pipeline_response) + deserialized = self._deserialize("IntegrationRuntimeNodeIpAddress", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_object_metadata_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_object_metadata_operations.py index da07d23ee07..f3a6f5410db 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_object_metadata_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_object_metadata_operations.py @@ -8,7 +8,7 @@ # -------------------------------------------------------------------------- from io import IOBase import sys -from typing import Any, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload +from typing import Any, Callable, Dict, IO, Iterator, Optional, Type, TypeVar, Union, cast, overload from azure.core.exceptions import ( ClientAuthenticationError, @@ -16,12 +16,13 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, 
PollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat @@ -29,7 +30,6 @@ from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -162,7 +162,7 @@ def __init__(self, *args, **kwargs): def _refresh_initial( self, resource_group_name: str, factory_name: str, integration_runtime_name: str, **kwargs: Any - ) -> Optional[_models.SsisObjectMetadataStatusResponse]: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -175,7 +175,7 @@ def _refresh_initial( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[Optional[_models.SsisObjectMetadataStatusResponse]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) _request = build_refresh_request( resource_group_name=resource_group_name, @@ -186,10 +186,10 @@ def _refresh_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -197,12 +197,14 @@ def _refresh_initial( response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize("SsisObjectMetadataStatusResponse", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -246,10 +248,11 @@ def begin_refresh( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("SsisObjectMetadataStatusResponse", pipeline_response) + deserialized = self._deserialize("SsisObjectMetadataStatusResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -397,7 +400,6 @@ def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -411,7 +413,7 @@ def get( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("SsisObjectMetadataListResponse", pipeline_response) + deserialized = self._deserialize("SsisObjectMetadataListResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore diff --git 
a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtimes_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtimes_operations.py index a697acdc95b..b3fec9c18dd 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtimes_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtimes_operations.py @@ -8,7 +8,7 @@ # -------------------------------------------------------------------------- from io import IOBase import sys -from typing import Any, Callable, Dict, IO, Iterable, Optional, Type, TypeVar, Union, cast, overload +from typing import Any, Callable, Dict, IO, Iterable, Iterator, Optional, Type, TypeVar, Union, cast, overload import urllib.parse from azure.core.exceptions import ( @@ -17,13 +17,14 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat @@ -31,7 +32,6 @@ from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -942,7 +942,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -958,7 +957,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -1121,7 +1119,6 @@ def create_or_update( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1135,7 +1132,7 @@ def create_or_update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("IntegrationRuntimeResource", pipeline_response) + deserialized = self._deserialize("IntegrationRuntimeResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1191,7 +1188,6 @@ def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1207,7 +1203,7 @@ def get( deserialized = None if response.status_code == 200: - deserialized = self._deserialize("IntegrationRuntimeResource", pipeline_response) + deserialized = self._deserialize("IntegrationRuntimeResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1335,7 +1331,6 @@ def update( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1349,7 +1344,7 
@@ def update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("IntegrationRuntimeResource", pipeline_response) + deserialized = self._deserialize("IntegrationRuntimeResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1395,7 +1390,6 @@ def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1451,7 +1445,6 @@ def get_status( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1465,7 +1458,7 @@ def get_status( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("IntegrationRuntimeStatusResponse", pipeline_response) + deserialized = self._deserialize("IntegrationRuntimeStatusResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1513,7 +1506,6 @@ def list_outbound_network_dependencies_endpoints( # pylint: disable=name-too-lo headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1528,7 +1520,7 @@ def list_outbound_network_dependencies_endpoints( # pylint: disable=name-too-lo raise HttpResponseError(response=response, error_format=ARMErrorFormat) deserialized = self._deserialize( - "IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse", pipeline_response + "IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse", pipeline_response.http_response ) if cls: @@ -1576,7 +1568,6 @@ def get_connection_info( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1590,7 +1581,7 @@ def get_connection_info( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("IntegrationRuntimeConnectionInfo", pipeline_response) + deserialized = self._deserialize("IntegrationRuntimeConnectionInfo", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1719,7 +1710,6 @@ def regenerate_auth_key( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1733,7 +1723,7 @@ def regenerate_auth_key( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("IntegrationRuntimeAuthKeys", pipeline_response) + deserialized = self._deserialize("IntegrationRuntimeAuthKeys", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1779,7 +1769,6 @@ def list_auth_keys( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1793,7 +1782,7 @@ def list_auth_keys( map_error(status_code=response.status_code, response=response, 
error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("IntegrationRuntimeAuthKeys", pipeline_response) + deserialized = self._deserialize("IntegrationRuntimeAuthKeys", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1802,7 +1791,7 @@ def list_auth_keys( def _start_initial( self, resource_group_name: str, factory_name: str, integration_runtime_name: str, **kwargs: Any - ) -> Optional[_models.IntegrationRuntimeStatusResponse]: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1815,7 +1804,7 @@ def _start_initial( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[Optional[_models.IntegrationRuntimeStatusResponse]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) _request = build_start_request( resource_group_name=resource_group_name, @@ -1826,10 +1815,10 @@ def _start_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1837,12 +1826,14 @@ def _start_initial( response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize("IntegrationRuntimeStatusResponse", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1886,10 +1877,11 @@ def begin_start( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("IntegrationRuntimeStatusResponse", pipeline_response) + deserialized = self._deserialize("IntegrationRuntimeStatusResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -1911,9 +1903,9 @@ def get_long_running_output(pipeline_response): self._client, raw_result, get_long_running_output, polling_method # type: ignore ) - def _stop_initial( # pylint: disable=inconsistent-return-statements + def _stop_initial( self, resource_group_name: str, factory_name: str, integration_runtime_name: str, **kwargs: Any - ) -> None: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1926,7 +1918,7 @@ def _stop_initial( # pylint: disable=inconsistent-return-statements _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: 
ClsType[Iterator[bytes]] = kwargs.pop("cls", None) _request = build_stop_request( resource_group_name=resource_group_name, @@ -1937,10 +1929,10 @@ def _stop_initial( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1948,11 +1940,19 @@ def _stop_initial( # pylint: disable=inconsistent-return-statements response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) # type: ignore + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore @distributed_trace def begin_stop( @@ -1979,7 +1979,7 @@ def begin_stop( lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._stop_initial( # type: ignore + raw_result = self._stop_initial( resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, @@ -1989,6 +1989,7 @@ def begin_stop( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -2052,7 +2053,6 @@ def sync_credentials( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -2109,7 +2109,6 @@ def get_monitoring_data( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -2123,7 +2122,7 @@ def get_monitoring_data( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("IntegrationRuntimeMonitoringData", pipeline_response) + deserialized = self._deserialize("IntegrationRuntimeMonitoringData", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -2169,7 +2168,6 @@ def upgrade( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -2310,7 +2308,6 @@ def remove_links( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -2450,7 +2447,6 @@ def create_linked_integration_runtime( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -2464,7 
+2460,7 @@ def create_linked_integration_runtime( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("IntegrationRuntimeStatusResponse", pipeline_response) + deserialized = self._deserialize("IntegrationRuntimeStatusResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_linked_services_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_linked_services_operations.py index 336723385af..63d7ce31075 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_linked_services_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_linked_services_operations.py @@ -21,15 +21,13 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -304,7 +302,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -320,7 +317,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -482,7 +478,6 @@ def create_or_update( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -496,7 +491,7 @@ def create_or_update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("LinkedServiceResource", pipeline_response) + deserialized = self._deserialize("LinkedServiceResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -552,7 +547,6 @@ def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -568,7 +562,7 @@ def get( deserialized = None if response.status_code == 200: - deserialized = self._deserialize("LinkedServiceResource", pipeline_response) + deserialized = self._deserialize("LinkedServiceResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -614,7 +608,6 @@ def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_private_endpoints_operations.py 
b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_private_endpoints_operations.py index d6d035e10c3..20257def358 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_private_endpoints_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_private_endpoints_operations.py @@ -21,15 +21,13 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -347,7 +345,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -363,7 +360,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -536,7 +532,6 @@ def create_or_update( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -550,7 +545,7 @@ def create_or_update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("ManagedPrivateEndpointResource", pipeline_response) + deserialized = self._deserialize("ManagedPrivateEndpointResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -610,7 +605,6 @@ def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -624,7 +618,7 @@ def get( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("ManagedPrivateEndpointResource", pipeline_response) + deserialized = self._deserialize("ManagedPrivateEndpointResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -678,7 +672,6 @@ def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_virtual_networks_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_virtual_networks_operations.py index 8e8d3c925a4..f13244070d7 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_virtual_networks_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_virtual_networks_operations.py @@ -21,15 +21,13 @@ ) from azure.core.paging import 
ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -257,7 +255,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -273,7 +270,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -436,7 +432,6 @@ def create_or_update( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -450,7 +445,7 @@ def create_or_update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("ManagedVirtualNetworkResource", pipeline_response) + deserialized = self._deserialize("ManagedVirtualNetworkResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -506,7 +501,6 @@ def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -520,7 +514,7 @@ def get( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("ManagedVirtualNetworkResource", pipeline_response) + deserialized = self._deserialize("ManagedVirtualNetworkResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_operations.py index 6cc126f9ab3..b6be3c7fe24 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_operations.py @@ -20,15 +20,13 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -109,7 +107,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -125,7 +122,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_runs_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_runs_operations.py index 2a211e013a6..9fc1d2636bd 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_runs_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_runs_operations.py @@ -19,15 +19,13 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -299,7 +297,6 @@ def query_by_factory( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -313,7 +310,7 @@ def query_by_factory( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("PipelineRunsQueryResponse", pipeline_response) + deserialized = self._deserialize("PipelineRunsQueryResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -357,7 +354,6 @@ def get(self, resource_group_name: str, factory_name: str, run_id: str, **kwargs headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -371,7 +367,7 @@ def get(self, resource_group_name: str, factory_name: str, run_id: str, **kwargs map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("PipelineRun", pipeline_response) + deserialized = self._deserialize("PipelineRun", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -426,7 +422,6 @@ def cancel( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipelines_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipelines_operations.py index 6d3f7ef8bae..573caa3617a 
100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipelines_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipelines_operations.py @@ -21,15 +21,13 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -374,7 +372,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -390,7 +387,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -549,7 +545,6 @@ def create_or_update( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -563,7 +558,7 @@ def create_or_update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("PipelineResource", pipeline_response) + deserialized = self._deserialize("PipelineResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -619,7 +614,6 @@ def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -635,7 +629,7 @@ def get( deserialized = None if response.status_code == 200: - deserialized = self._deserialize("PipelineResource", pipeline_response) + deserialized = self._deserialize("PipelineResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -681,7 +675,6 @@ def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -879,7 +872,6 @@ def create_run( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -893,7 +885,7 @@ def create_run( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("CreateRunResponse", pipeline_response) + deserialized = self._deserialize("CreateRunResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_private_end_point_connections_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_private_end_point_connections_operations.py index 99924ff8aef..3976f19d464 100644 --- 
a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_private_end_point_connections_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_private_end_point_connections_operations.py @@ -20,15 +20,13 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -141,7 +139,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -157,7 +154,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_private_endpoint_connection_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_private_endpoint_connection_operations.py index 4ae9311014f..c731ff51c36 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_private_endpoint_connection_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_private_endpoint_connection_operations.py @@ -19,15 +19,13 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -346,7 +344,6 @@ def create_or_update( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -360,7 +357,7 @@ def create_or_update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("PrivateEndpointConnectionResource", pipeline_response) + deserialized = self._deserialize("PrivateEndpointConnectionResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -416,7 +413,6 @@ def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -430,7 +426,7 @@ def get( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("PrivateEndpointConnectionResource", pipeline_response) + deserialized = self._deserialize("PrivateEndpointConnectionResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -476,7 +472,6 @@ def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_private_link_resources_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_private_link_resources_operations.py index 69246e7ba86..02ddf24d599 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_private_link_resources_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_private_link_resources_operations.py @@ -18,15 +18,13 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -130,7 +128,6 @@ def get(self, resource_group_name: str, factory_name: str, **kwargs: Any) -> _mo headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -144,7 +141,7 @@ def get(self, resource_group_name: str, factory_name: str, **kwargs: Any) -> _mo map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("PrivateLinkResourcesWrapper", pipeline_response) + deserialized = self._deserialize("PrivateLinkResourcesWrapper", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_runs_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_runs_operations.py index b7287aa292e..885ad4224f8 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_runs_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_runs_operations.py @@ -19,15 +19,13 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -242,7 +240,6 @@ def rerun( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -301,7 +298,6 @@ def cancel( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -425,7 +421,6 @@ def query_by_factory( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -439,7 +434,7 @@ def query_by_factory( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("TriggerRunsQueryResponse", pipeline_response) + deserialized = self._deserialize("TriggerRunsQueryResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_triggers_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_triggers_operations.py index 34014519da0..90d8ee35aea 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_triggers_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_triggers_operations.py @@ -8,7 +8,7 @@ # -------------------------------------------------------------------------- from io import IOBase import sys -from typing import Any, Callable, Dict, IO, Iterable, Optional, Type, TypeVar, Union, cast, overload +from typing import Any, Callable, Dict, IO, Iterable, Iterator, Optional, Type, TypeVar, Union, cast, overload import urllib.parse from azure.core.exceptions import ( @@ -17,13 +17,14 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat @@ -31,7 +32,6 @@ from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -588,7 +588,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -604,7 +603,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -740,7 +738,6 @@ def query_by_factory( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -754,7 +751,7 @@ def query_by_factory( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("TriggerQueryResponse", pipeline_response) + deserialized = self._deserialize("TriggerQueryResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -891,7 +888,6 @@ def create_or_update( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -905,7 +901,7 @@ def create_or_update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("TriggerResource", pipeline_response) + deserialized = self._deserialize("TriggerResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -961,7 +957,6 @@ def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -977,7 +972,7 @@ def get( deserialized = None if response.status_code == 200: - deserialized = self._deserialize("TriggerResource", pipeline_response) + deserialized = self._deserialize("TriggerResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1023,7 +1018,6 @@ def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1042,7 +1036,7 @@ def delete( # pylint: disable=inconsistent-return-statements def _subscribe_to_events_initial( self, resource_group_name: str, factory_name: str, trigger_name: str, **kwargs: Any - ) -> Optional[_models.TriggerSubscriptionOperationStatus]: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1055,7 +1049,7 @@ def _subscribe_to_events_initial( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[Optional[_models.TriggerSubscriptionOperationStatus]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) _request = build_subscribe_to_events_request( resource_group_name=resource_group_name, @@ -1066,10 +1060,10 @@ def 
_subscribe_to_events_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1077,12 +1071,14 @@ def _subscribe_to_events_initial( response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize("TriggerSubscriptionOperationStatus", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1126,10 +1122,11 @@ def begin_subscribe_to_events( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("TriggerSubscriptionOperationStatus", pipeline_response) + deserialized = self._deserialize("TriggerSubscriptionOperationStatus", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -1190,7 +1187,6 @@ def get_event_subscription_status( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1204,7 +1200,7 @@ def get_event_subscription_status( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("TriggerSubscriptionOperationStatus", pipeline_response) + deserialized = self._deserialize("TriggerSubscriptionOperationStatus", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1213,7 +1209,7 @@ def get_event_subscription_status( def _unsubscribe_from_events_initial( self, resource_group_name: str, factory_name: str, trigger_name: str, **kwargs: Any - ) -> Optional[_models.TriggerSubscriptionOperationStatus]: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1226,7 +1222,7 @@ def _unsubscribe_from_events_initial( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[Optional[_models.TriggerSubscriptionOperationStatus]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) _request = build_unsubscribe_from_events_request( resource_group_name=resource_group_name, @@ -1237,10 +1233,10 @@ def _unsubscribe_from_events_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, 
**kwargs ) @@ -1248,12 +1244,14 @@ def _unsubscribe_from_events_initial( response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize("TriggerSubscriptionOperationStatus", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1297,10 +1295,11 @@ def begin_unsubscribe_from_events( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("TriggerSubscriptionOperationStatus", pipeline_response) + deserialized = self._deserialize("TriggerSubscriptionOperationStatus", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -1322,9 +1321,9 @@ def get_long_running_output(pipeline_response): self._client, raw_result, get_long_running_output, polling_method # type: ignore ) - def _start_initial( # pylint: disable=inconsistent-return-statements + def _start_initial( self, resource_group_name: str, factory_name: str, trigger_name: str, **kwargs: Any - ) -> None: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1337,7 +1336,7 @@ def _start_initial( # pylint: disable=inconsistent-return-statements _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) _request = build_start_request( resource_group_name=resource_group_name, @@ -1348,10 +1347,10 @@ def _start_initial( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1359,11 +1358,19 @@ def _start_initial( # pylint: disable=inconsistent-return-statements response = pipeline_response.http_response if response.status_code not in [200]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) # type: ignore + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore @distributed_trace def begin_start( @@ -1390,7 +1397,7 @@ def begin_start( lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) 
if cont_token is None: - raw_result = self._start_initial( # type: ignore + raw_result = self._start_initial( resource_group_name=resource_group_name, factory_name=factory_name, trigger_name=trigger_name, @@ -1400,6 +1407,7 @@ def begin_start( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -1421,9 +1429,9 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- ) return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore - def _stop_initial( # pylint: disable=inconsistent-return-statements + def _stop_initial( self, resource_group_name: str, factory_name: str, trigger_name: str, **kwargs: Any - ) -> None: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1436,7 +1444,7 @@ def _stop_initial( # pylint: disable=inconsistent-return-statements _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) _request = build_stop_request( resource_group_name=resource_group_name, @@ -1447,10 +1455,10 @@ def _stop_initial( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1458,11 +1466,19 @@ def _stop_initial( # pylint: disable=inconsistent-return-statements response = pipeline_response.http_response if response.status_code not in [200]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) # type: ignore + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore @distributed_trace def begin_stop( @@ -1489,7 +1505,7 @@ def begin_stop( lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._stop_initial( # type: ignore + raw_result = self._stop_initial( resource_group_name=resource_group_name, factory_name=factory_name, trigger_name=trigger_name, @@ -1499,6 +1515,7 @@ def begin_stop( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements diff --git a/src/datafactory/setup.py b/src/datafactory/setup.py index fad84011757..cb149beb933 100644 --- a/src/datafactory/setup.py +++ b/src/datafactory/setup.py @@ -10,7 +10,7 @@ from setuptools import setup, find_packages # HISTORY.rst entry. 
-VERSION = "1.0.2" +VERSION = "1.0.3" try: from azext_datafactory.manual.version import VERSION except ImportError: