From 8c5aa23f92d81c5b5e3d318be6b3ca9e73abb123 Mon Sep 17 00:00:00 2001 From: Brandon Chinn Date: Wed, 1 Nov 2023 10:59:51 -0700 Subject: [PATCH 1/8] Refactor: Use getattr instead of __getattribute__ --- src/snowflake/snowpark/_internal/analyzer/snowflake_plan.py | 4 +--- src/snowflake/snowpark/_internal/error_message.py | 4 +--- 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/src/snowflake/snowpark/_internal/analyzer/snowflake_plan.py b/src/snowflake/snowpark/_internal/analyzer/snowflake_plan.py index bb4503bb25a..61fbaa4cbfb 100644 --- a/src/snowflake/snowpark/_internal/analyzer/snowflake_plan.py +++ b/src/snowflake/snowpark/_internal/analyzer/snowflake_plan.py @@ -115,9 +115,7 @@ def wrap(*args, **kwargs): try: return func(*args, **kwargs) except snowflake.connector.errors.ProgrammingError as e: - query = None - if "query" in e.__dict__: - query = e.__getattribute__("query") + query = getattr(e, "query", None) tb = sys.exc_info()[2] assert e.msg is not None if "unexpected 'as'" in e.msg.lower(): diff --git a/src/snowflake/snowpark/_internal/error_message.py b/src/snowflake/snowpark/_internal/error_message.py index f979ab85262..39d347fe298 100644 --- a/src/snowflake/snowpark/_internal/error_message.py +++ b/src/snowflake/snowpark/_internal/error_message.py @@ -306,9 +306,7 @@ def SQL_PYTHON_REPORT_JOIN_AMBIGUOUS( def SQL_EXCEPTION_FROM_PROGRAMMING_ERROR( pe: ProgrammingError, ) -> SnowparkSQLException: - query = None - if "query" in pe.__dict__: - query = pe.__getattribute__("query") + query = getattr(pe, "query", None) return SnowparkSQLException( pe.msg, "1304", From 35914130cb8008d49347ddd2f83c9878442fb64c Mon Sep 17 00:00:00 2001 From: Brandon Chinn Date: Wed, 1 Nov 2023 11:06:32 -0700 Subject: [PATCH 2/8] Bugfix: Fix bad constructor --- src/snowflake/snowpark/_internal/error_message.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/snowflake/snowpark/_internal/error_message.py b/src/snowflake/snowpark/_internal/error_message.py index 39d347fe298..acf07737055 100644 --- a/src/snowflake/snowpark/_internal/error_message.py +++ b/src/snowflake/snowpark/_internal/error_message.py @@ -239,8 +239,8 @@ def PLAN_CREATE_DYNAMIC_TABLE_FROM_DDL_DML_OPERATIONS() -> SnowparkCreateDynamic @staticmethod def DF_ALIAS_NOT_RECOGNIZED(alias: str) -> SnowparkDataframeException: return SnowparkDataframeException( - f"DataFrame alias unrecognized. A subset of columns corresponding to Dataframe alias '{alias}' can not be found. " - "1208", + f"DataFrame alias unrecognized. A subset of columns corresponding to Dataframe alias '{alias}' can not be found. ", + error_code="1208", ) @staticmethod From 8cc8df025c9fea48237873c989fbb29c4d7b86eb Mon Sep 17 00:00:00 2001 From: Brandon Chinn Date: Wed, 1 Nov 2023 11:07:18 -0700 Subject: [PATCH 3/8] Refactor: Use kwargs instead of specifying None for sfqid --- src/snowflake/snowpark/_internal/error_message.py | 15 ++++++--------- 1 file changed, 6 insertions(+), 9 deletions(-) diff --git a/src/snowflake/snowpark/_internal/error_message.py b/src/snowflake/snowpark/_internal/error_message.py index acf07737055..691f436a630 100644 --- a/src/snowflake/snowpark/_internal/error_message.py +++ b/src/snowflake/snowpark/_internal/error_message.py @@ -266,9 +266,8 @@ def SQL_PYTHON_REPORT_UNEXPECTED_ALIAS( return SnowparkSQLUnexpectedAliasException( "You can only define aliases for the root Columns in a DataFrame returned by " "select() and agg(). 
You cannot use aliases for Columns in expressions.", - "1301", - None, # sfqid - query, + error_code="1301", + query=query, ) @staticmethod @@ -278,9 +277,8 @@ def SQL_PYTHON_REPORT_INVALID_ID( return SnowparkSQLInvalidIdException( f'The column specified in df("{name}") ' f"is not present in the output of the DataFrame.", - "1302", - None, # sfqid, - query, + error_code="1302", + query=query, ) @staticmethod @@ -297,9 +295,8 @@ def SQL_PYTHON_REPORT_JOIN_AMBIGUOUS( f"result of the join. Alternatively, you can rename the column in " f"either DataFrame for disambiguation. See the API documentation of " f"the DataFrame.join() method for more details.", - "1303", - None, # sfqid - query, + error_code="1303", + query=query, ) @staticmethod From c25b959644b1c6d6fec15560f6f27ed7b99c30b9 Mon Sep 17 00:00:00 2001 From: Brandon Chinn Date: Wed, 1 Nov 2023 11:07:55 -0700 Subject: [PATCH 4/8] Refactor: Specify exception parameters as kwargs --- .../snowpark/_internal/error_message.py | 105 ++++++++++-------- 1 file changed, 60 insertions(+), 45 deletions(-) diff --git a/src/snowflake/snowpark/_internal/error_message.py b/src/snowflake/snowpark/_internal/error_message.py index 691f436a630..8db3b32dda4 100644 --- a/src/snowflake/snowpark/_internal/error_message.py +++ b/src/snowflake/snowpark/_internal/error_message.py @@ -40,7 +40,9 @@ class SnowparkClientExceptionMessages: @staticmethod def INTERNAL_TEST_MESSAGE(message: str) -> _SnowparkInternalException: - return _SnowparkInternalException(f"internal test message: {message}.", "1010") + return _SnowparkInternalException( + f"internal test message: {message}.", error_code="1010" + ) # DataFrame Error Messages 01XX @@ -49,12 +51,12 @@ def DF_CANNOT_DROP_COLUMN_NAME(col_name: str) -> SnowparkColumnException: return SnowparkColumnException( f"Unable to drop the column {col_name}. You must specify the column by name " f'(e.g. df.drop(col("a"))).', - "1100", + error_code="1100", ) @staticmethod def DF_CANNOT_DROP_ALL_COLUMNS() -> SnowparkColumnException: - return SnowparkColumnException("Cannot drop all columns", "1101") + return SnowparkColumnException("Cannot drop all columns", error_code="1101") @staticmethod def DF_CANNOT_RESOLVE_COLUMN_NAME_AMONG( @@ -63,7 +65,7 @@ def DF_CANNOT_RESOLVE_COLUMN_NAME_AMONG( return SnowparkColumnException( f'Cannot combine the DataFrames by column names. The column "{col_name}" is ' f"not a column in the other DataFrame ({all_columns}).", - "1102", + error_code="1102", ) @staticmethod @@ -80,7 +82,7 @@ def DF_SELF_JOIN_NOT_SUPPORTED() -> SnowparkJoinException: "You cannot join a DataFrame with itself because the column references cannot " "be resolved correctly. Instead, create a copy of the DataFrame with copy.copy(), " "and join the DataFrame with this copy.", - "1103", + error_code="1103", ) @staticmethod @@ -88,20 +90,21 @@ def DF_FLATTEN_UNSUPPORTED_INPUT_MODE(mode: str) -> SnowparkDataframeException: return SnowparkDataframeException( f"Unsupported input mode {mode}. 
For the mode parameter in flatten(), you must " f"specify OBJECT, ARRAY, or BOTH.", - "1104", + error_code="1104", ) @staticmethod def DF_CANNOT_RESOLVE_COLUMN_NAME(col_name: str) -> SnowparkColumnException: return SnowparkColumnException( - f"The DataFrame does not contain the column named {col_name}.", "1105" + f"The DataFrame does not contain the column named {col_name}.", + error_code="1105", ) @staticmethod def DF_MUST_PROVIDE_SCHEMA_FOR_READING_FILE() -> SnowparkDataframeReaderException: return SnowparkDataframeReaderException( "You must call DataFrameReader.schema() and specify the schema for the file.", - "1106", + error_code="1106", ) @staticmethod @@ -119,7 +122,7 @@ def DF_CROSS_TAB_COUNT_TOO_LARGE( return SnowparkDataframeException( f"The number of distinct values in the second input column ({count}) exceeds " f"the maximum number of distinct values allowed ({max_count}).", - "1107", + error_code="1107", ) @staticmethod @@ -129,7 +132,7 @@ def DF_DATAFRAME_IS_NOT_QUALIFIED_FOR_SCALAR_QUERY( return SnowparkDataframeException( f"The DataFrame passed in to this function must have only one output column. " f"This DataFrame has {count} output columns: {columns}", - "1108", + error_code="1108", ) @staticmethod @@ -137,29 +140,33 @@ def DF_PIVOT_ONLY_SUPPORT_ONE_AGG_EXPR() -> SnowparkDataframeException: return SnowparkDataframeException( "You can apply only one aggregate expression to a RelationalGroupedDataFrame " "returned by the pivot() method.", - "1109", + error_code="1109", ) @staticmethod def DF_JOIN_INVALID_JOIN_TYPE(type1: str, types: str) -> SnowparkJoinException: return SnowparkJoinException( f"Unsupported join type '{type1}'. Supported join types include: {types}.", - "1110", + error_code="1110", ) @staticmethod def DF_JOIN_INVALID_NATURAL_JOIN_TYPE(tpe: str) -> SnowparkJoinException: - return SnowparkJoinException(f"Unsupported natural join type '{tpe}'.", "1111") + return SnowparkJoinException( + f"Unsupported natural join type '{tpe}'.", error_code="1111" + ) @staticmethod def DF_JOIN_INVALID_USING_JOIN_TYPE(tpe: str) -> SnowparkJoinException: - return SnowparkJoinException(f"Unsupported using join type '{tpe}'.", "1112") + return SnowparkJoinException( + f"Unsupported using join type '{tpe}'.", error_code="1112" + ) @staticmethod def DF_PANDAS_GENERAL_EXCEPTION(msg: str) -> SnowparkPandasException: return SnowparkPandasException( f"Unable to write pandas dataframe to Snowflake. COPY INTO command output {msg}", - "1113", + error_code="1113", ) @staticmethod @@ -170,7 +177,7 @@ def DF_PANDAS_TABLE_DOES_NOT_EXIST_EXCEPTION( f"Cannot write pandas DataFrame to table {location} " f"because it does not exist. 
Create table before " f"trying to write a pandas DataFrame", - "1114", + error_code="1114", ) @staticmethod @@ -178,14 +185,15 @@ def MERGE_TABLE_ACTION_ALREADY_SPECIFIED( action: str, clause: str ) -> SnowparkTableException: return SnowparkTableException( - f"{action} has been specified for {clause} to merge table", "1115" + f"{action} has been specified for {clause} to merge table", + error_code="1115", ) # Plan Analysis error codes 02XX @staticmethod def PLAN_ANALYZER_INVALID_IDENTIFIER(name: str) -> SnowparkPlanException: - return SnowparkPlanException(f"Invalid identifier {name}", "1200") + return SnowparkPlanException(f"Invalid identifier {name}", error_code="1200") @staticmethod def PLAN_ANALYZER_UNSUPPORTED_VIEW_TYPE( @@ -194,7 +202,7 @@ def PLAN_ANALYZER_UNSUPPORTED_VIEW_TYPE( return SnowparkPlanException( f"Internal Error: Only PersistedView and LocalTempView are supported. " f"view type: {type_name}", - "1201", + error_code="1201", ) @staticmethod @@ -203,7 +211,7 @@ def PLAN_COPY_DONT_SUPPORT_SKIP_LOADED_FILES(value: str) -> SnowparkPlanExceptio f"The COPY option 'FORCE = {value}' is not supported by the Snowpark library. " f"The Snowflake library loads all files, even if the files have been loaded " f"previously and have not changed since they were loaded.", - "1202", + error_code="1202", ) @staticmethod @@ -211,29 +219,33 @@ def PLAN_CREATE_VIEW_FROM_DDL_DML_OPERATIONS() -> SnowparkCreateViewException: return SnowparkCreateViewException( "Your dataframe may include DDL or DML operations. Creating a view from " "this DataFrame is currently not supported.", - "1203", + error_code="1203", ) @staticmethod def PLAN_CREATE_VIEWS_FROM_SELECT_ONLY() -> SnowparkCreateViewException: return SnowparkCreateViewException( - "Creating views from SELECT queries supported only.", "1204" + "Creating views from SELECT queries supported only.", error_code="1204" ) @staticmethod def PLAN_INVALID_TYPE(type: str) -> SnowparkPlanException: - return SnowparkPlanException(f"Invalid type, analyze. {type}", "1205") + return SnowparkPlanException( + f"Invalid type, analyze. {type}", error_code="1205" + ) @staticmethod def PLAN_CANNOT_CREATE_LITERAL(type: str) -> SnowparkPlanException: - return SnowparkPlanException(f"Cannot create a Literal for {type}", "1206") + return SnowparkPlanException( + f"Cannot create a Literal for {type}", error_code="1206" + ) @staticmethod def PLAN_CREATE_DYNAMIC_TABLE_FROM_DDL_DML_OPERATIONS() -> SnowparkCreateDynamicTableException: return SnowparkCreateDynamicTableException( "Your dataframe may include DDL or DML operations. 
Creating a dynamic table from " "this DataFrame is currently not supported.", - "1207", + error_code="1207", ) @staticmethod @@ -246,7 +258,8 @@ def DF_ALIAS_NOT_RECOGNIZED(alias: str) -> SnowparkDataframeException: @staticmethod def PLAN_CREATE_DYNAMIC_TABLE_FROM_SELECT_ONLY() -> SnowparkCreateDynamicTableException: return SnowparkCreateDynamicTableException( - "Creating dynamic tables from SELECT queries supported only.", "1208" + "Creating dynamic tables from SELECT queries supported only.", + error_code="1208", ) # SQL Execution error codes 03XX @@ -256,7 +269,7 @@ def SQL_LAST_QUERY_RETURN_RESULTSET() -> SnowparkSQLException: return SnowparkSQLException( "Internal error: The execution for the last query " "in the Snowflake plan doesn't return a ResultSet.", - "1300", + error_code="1300", ) @staticmethod @@ -306,9 +319,9 @@ def SQL_EXCEPTION_FROM_PROGRAMMING_ERROR( query = getattr(pe, "query", None) return SnowparkSQLException( pe.msg, - "1304", - pe.sfqid, - query, + error_code="1304", + sfqid=pe.sfqid, + query=query, sql_error_code=pe.errno, raw_message=pe.raw_msg, ) @@ -319,8 +332,8 @@ def SQL_EXCEPTION_FROM_OPERATIONAL_ERROR( ) -> SnowparkSQLException: return SnowparkSQLException( oe.msg, - "1305", - oe.sfqid, + error_code="1305", + sfqid=oe.sfqid, sql_error_code=oe.errno, raw_message=oe.raw_msg, ) @@ -335,13 +348,13 @@ def SERVER_CANNOT_FIND_CURRENT_DB_OR_SCHEMA( f"The {v1} is not set for the current session. To set this, either run " f'session.sql("USE {v2}").collect() or set the {v3} connection property in ' f"the dict or properties file that you specify when creating a session.", - "1400", + error_code="1400", ) @staticmethod def SERVER_QUERY_IS_CANCELLED() -> SnowparkQueryCancelledException: return SnowparkQueryCancelledException( - "The query has been cancelled by the user.", "1401" + "The query has been cancelled by the user.", error_code="1401" ) @staticmethod @@ -349,7 +362,7 @@ def SERVER_SESSION_EXPIRED(error_message: str) -> SnowparkSessionException: return SnowparkSessionException( f"Your Snowpark session has expired. You must recreate your " f"session.\n{error_message}", - "1402", + error_code="1402", ) @staticmethod @@ -357,25 +370,27 @@ def SERVER_NO_DEFAULT_SESSION() -> SnowparkSessionException: return SnowparkSessionException( "No default Session is found. " "Please create a session before you call function 'udf' or use decorator '@udf'.", - "1403", + error_code="1403", ) @staticmethod def SERVER_SESSION_HAS_BEEN_CLOSED() -> SnowparkSessionException: return SnowparkSessionException( - "Cannot perform this operation because the session has been closed.", "1404" + "Cannot perform this operation because the session has been closed.", + error_code="1404", ) @staticmethod def SERVER_FAILED_CLOSE_SESSION(message: str) -> SnowparkSessionException: return SnowparkSessionException( - f"Failed to close this session. The error is: {message}", "1405" + f"Failed to close this session. The error is: {message}", error_code="1405" ) @staticmethod def SERVER_FAILED_FETCH_PANDAS(message: str) -> SnowparkFetchDataException: return SnowparkFetchDataException( - f"Failed to fetch a Pandas Dataframe. The error is: {message}", "1406" + f"Failed to fetch a Pandas Dataframe. 
The error is: {message}", + error_code="1406", ) @staticmethod @@ -392,7 +407,7 @@ def SERVER_UDF_UPLOAD_FILE_STREAM_CLOSED( "consider uploading the large data to a stage, then the " "UDF can be read it from the stage while also retain a " "small size.", - "1407", + error_code="1407", ) @staticmethod @@ -402,7 +417,7 @@ def SERVER_UPLOAD_FILE_STREAM_CLOSED( return SnowparkUploadFileException( "A file stream was closed when uploading files to the server." f"The destination file name is: {dest_filename}. ", - "1408", + error_code="1408", ) @staticmethod @@ -412,21 +427,21 @@ def MORE_THAN_ONE_ACTIVE_SESSIONS() -> SnowparkSessionException: "When you call function 'udf' or use decorator '@udf', " "you must specify the 'session' parameter if you created multiple sessions." "Alternatively, you can use 'session.udf.register' to register UDFs", - "1409", + error_code="1409", ) @staticmethod def DONT_CREATE_SESSION_IN_SP() -> SnowparkSessionException: return SnowparkSessionException( "In a stored procedure, you shouldn't create a session. The stored procedure provides a session.", - "1410", + error_code="1410", ) @staticmethod def DONT_CLOSE_SESSION_IN_SP() -> SnowparkSessionException: return SnowparkSessionException( "In a stored procedure, you shouldn't close a session. The stored procedure manages the lifecycle of the provided session.", - "1411", + error_code="1411", ) # General Error codes 15XX @@ -436,5 +451,5 @@ def GENERAL_INVALID_OBJECT_NAME( type_name: str, ) -> SnowparkInvalidObjectNameException: return SnowparkInvalidObjectNameException( - f"The object name '{type_name}' is invalid.", "1500" + f"The object name '{type_name}' is invalid.", error_code="1500" ) From 2b78bc04255549dac2794190d470fb62d8857287 Mon Sep 17 00:00:00 2001 From: Brandon Chinn Date: Wed, 1 Nov 2023 11:00:40 -0700 Subject: [PATCH 5/8] Refactor: Force exception args to be passed as kwargs --- src/snowflake/snowpark/exceptions.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/snowflake/snowpark/exceptions.py b/src/snowflake/snowpark/exceptions.py index c86363ddbae..93c799bd5cc 100644 --- a/src/snowflake/snowpark/exceptions.py +++ b/src/snowflake/snowpark/exceptions.py @@ -16,6 +16,7 @@ class SnowparkClientException(Exception): def __init__( self, message: str, + *, error_code: Optional[str] = None, ) -> None: self.message: str = message @@ -76,6 +77,7 @@ class SnowparkSQLException(SnowparkClientException): def __init__( self, message: str, + *, error_code: Optional[str] = None, sfqid: Optional[str] = None, query: Optional[str] = None, From d347bc233bd1d2ca9b313323372decaac95e1654 Mon Sep 17 00:00:00 2001 From: Brandon Chinn Date: Wed, 1 Nov 2023 11:13:51 -0700 Subject: [PATCH 6/8] Refactor: Reuse SnowparkClientException.__init__ in SnowparkSQLException --- src/snowflake/snowpark/exceptions.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/src/snowflake/snowpark/exceptions.py b/src/snowflake/snowpark/exceptions.py index 93c799bd5cc..ffeb2a22d1a 100644 --- a/src/snowflake/snowpark/exceptions.py +++ b/src/snowflake/snowpark/exceptions.py @@ -84,11 +84,10 @@ def __init__( sql_error_code: Optional[int] = None, raw_message: Optional[str] = None, ) -> None: - self.message: str = message - self.error_code: Optional[str] = error_code + super().__init__(message, error_code=error_code) + self.sfqid: Optional[str] = sfqid self.query: Optional[str] = query - self.telemetry_message: str = message self.sql_error_code = sql_error_code self.raw_message = raw_message From 
166abfe2fb523921c99ab33522bbe10850a85ec7 Mon Sep 17 00:00:00 2001 From: Brandon Chinn Date: Wed, 1 Nov 2023 11:18:22 -0700 Subject: [PATCH 7/8] Store snowflake-connector-python error in SnowflakeSQLException --- CHANGELOG.md | 4 ++++ .../snowpark/_internal/error_message.py | 18 ++---------------- src/snowflake/snowpark/exceptions.py | 12 ++++++++---- 3 files changed, 14 insertions(+), 20 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index eea76a2ca68..cf1b089248f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,10 @@ ## 1.10.1 (TBD) +### New Features + +- Add the `conn_error` attribute to `SnowflakeSQLException` that stores the whole underlying exception from `snowflake-connector-python` + ### Bug Fixes - DataFrame column names qouting check now supports newline characters. diff --git a/src/snowflake/snowpark/_internal/error_message.py b/src/snowflake/snowpark/_internal/error_message.py index 8db3b32dda4..39d3d93078c 100644 --- a/src/snowflake/snowpark/_internal/error_message.py +++ b/src/snowflake/snowpark/_internal/error_message.py @@ -316,27 +316,13 @@ def SQL_PYTHON_REPORT_JOIN_AMBIGUOUS( def SQL_EXCEPTION_FROM_PROGRAMMING_ERROR( pe: ProgrammingError, ) -> SnowparkSQLException: - query = getattr(pe, "query", None) - return SnowparkSQLException( - pe.msg, - error_code="1304", - sfqid=pe.sfqid, - query=query, - sql_error_code=pe.errno, - raw_message=pe.raw_msg, - ) + return SnowparkSQLException(pe.msg, error_code="1304", conn_error=pe) @staticmethod def SQL_EXCEPTION_FROM_OPERATIONAL_ERROR( oe: OperationalError, ) -> SnowparkSQLException: - return SnowparkSQLException( - oe.msg, - error_code="1305", - sfqid=oe.sfqid, - sql_error_code=oe.errno, - raw_message=oe.raw_msg, - ) + return SnowparkSQLException(oe.msg, error_code="1305", conn_error=oe) # Server Error Messages 04XX diff --git a/src/snowflake/snowpark/exceptions.py b/src/snowflake/snowpark/exceptions.py index ffeb2a22d1a..ed0e776b3f1 100644 --- a/src/snowflake/snowpark/exceptions.py +++ b/src/snowflake/snowpark/exceptions.py @@ -7,6 +7,8 @@ import logging from typing import Optional +from snowflake.connector.errors import Error as ConnectorError + _logger = logging.getLogger(__name__) @@ -79,6 +81,7 @@ def __init__( message: str, *, error_code: Optional[str] = None, + conn_error: Optional[ConnectorError] = None, sfqid: Optional[str] = None, query: Optional[str] = None, sql_error_code: Optional[int] = None, @@ -86,10 +89,11 @@ def __init__( ) -> None: super().__init__(message, error_code=error_code) - self.sfqid: Optional[str] = sfqid - self.query: Optional[str] = query - self.sql_error_code = sql_error_code - self.raw_message = raw_message + self.conn_error = conn_error + self.sfqid = sfqid or getattr(self.conn_error, "sfqid", None) + self.query = query or getattr(self.conn_error, "query", None) + self.sql_error_code = sql_error_code or getattr(self.conn_error, "errno", None) + self.raw_message = raw_message or getattr(self.conn_error, "raw_msg", None) pretty_error_code = f"({self.error_code}): " if self.error_code else "" pretty_sfqid = f"{self.sfqid}: " if self.sfqid else "" From 67c4c01f2fabe1fa240a61973af4e1ba18e16a97 Mon Sep 17 00:00:00 2001 From: Brandon Chinn Date: Wed, 1 Nov 2023 17:32:27 -0700 Subject: [PATCH 8/8] Expand tests --- tests/unit/test_error_message.py | 44 ++++++++++++++++++++++++-------- 1 file changed, 33 insertions(+), 11 deletions(-) diff --git a/tests/unit/test_error_message.py b/tests/unit/test_error_message.py index 5877ae74601..a3df558a38e 100644 --- a/tests/unit/test_error_message.py +++ 
b/tests/unit/test_error_message.py @@ -2,20 +2,42 @@ # Copyright (c) 2012-2023 Snowflake Computing Inc. All rights reserved. # -from snowflake.connector import ProgrammingError +from snowflake.connector import OperationalError, ProgrammingError from snowflake.snowpark._internal.error_message import SnowparkClientExceptionMessages +from snowflake.snowpark.exceptions import SnowparkSQLException -def test_programming_error_sql_exception_attributes(): +def test_sql_exception_from_programming_error(): pe = ProgrammingError( - msg="errmsg", - errno=123456, - sqlstate="P0000", - sfqid="the_query_id", - query="select * from foo", + msg="test message", + errno=123, + sfqid="0000-1111", + query="SELECT CURRENT_USER()", ) - sql_exception = ( - SnowparkClientExceptionMessages.SQL_EXCEPTION_FROM_PROGRAMMING_ERROR(pe) + ex = SnowparkClientExceptionMessages.SQL_EXCEPTION_FROM_PROGRAMMING_ERROR(pe) + assert type(ex) == SnowparkSQLException + assert ex.error_code == "1304" + assert ex.conn_error == pe + assert ex.sfqid == "0000-1111" + assert ex.query == "SELECT CURRENT_USER()" + assert ex.message == "000123: test message" + assert ex.sql_error_code == 123 + assert ex.raw_message == "test message" + + +def test_sql_exception_from_operational_error(): + oe = OperationalError( + msg="test message", + errno=123, + sfqid="0000-1111", + query="SELECT CURRENT_USER()", ) - assert sql_exception.sql_error_code == 123456 - assert sql_exception.raw_message == "errmsg" + ex = SnowparkClientExceptionMessages.SQL_EXCEPTION_FROM_OPERATIONAL_ERROR(oe) + assert type(ex) == SnowparkSQLException + assert ex.error_code == "1305" + assert ex.conn_error == oe + assert ex.sfqid == "0000-1111" + assert ex.query == "SELECT CURRENT_USER()" + assert ex.message == "000123: test message" + assert ex.sql_error_code == 123 + assert ex.raw_message == "test message"
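
Reviewer note, not part of the patch series: a minimal usage sketch of the exception API these patches converge on. The `session` object and the failing query are placeholders; the class, parameter, and attribute names are taken from the diffs above, and this assumes Snowpark surfaces a failed SQL statement as SnowparkSQLException.

    from snowflake.snowpark.exceptions import SnowparkSQLException

    try:
        session.sql("select * from table_that_does_not_exist").collect()
    except SnowparkSQLException as ex:
        # error_code and the other constructor arguments are keyword-only after
        # PATCH 5/8; the derived fields below are still populated, now via
        # getattr() fallbacks on the attached connector error (PATCHES 6-7).
        print(ex.error_code, ex.sfqid, ex.sql_error_code)
        # New in PATCH 7/8: the original snowflake-connector-python exception.
        if ex.conn_error is not None:
            print(type(ex.conn_error).__name__, ex.conn_error.raw_msg)

Since the factory methods in error_message.py now pass conn_error=pe, callers that previously re-parsed raw_message or sql_error_code can inspect the underlying connector error directly.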