forked from logicalclocks/feature-store-api
Commit
reverting unwanted spark test `_` replace changes
Showing 1 changed file with 13 additions and 13 deletions.
@@ -5,7 +5,7 @@
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at
 #
-# http://www.apache.org/licenses/LICENSE_2.0
+# http://www.apache.org/licenses/LICENSE-2.0
 #
 # Unless required by applicable law or agreed to in writing, software
 # distributed under the License is distributed on an "AS IS" BASIS,
@@ -937,7 +937,7 @@ def test_save_stream_dataframe(self, mocker, backend_fixtures):
             mock_spark_engine_online_fg_to_avro.return_value.withColumn.return_value.writeStream.outputMode.return_value.format.return_value.option.call_args[
                 0
             ][1]
-            == f"/Projects/test_project_name/Resources/{self._get_spark_query_name(project_id, fg)}_checkpoint"
+            == f"/Projects/test_project_name/Resources/{self._get_spark_query_name(project_id, fg)}-checkpoint"
         )
         assert (
             mock_spark_engine_online_fg_to_avro.return_value.withColumn.return_value.writeStream.outputMode.return_value.format.return_value.option.return_value.options.call_args[
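These assertions unwind a deep pytest-mock chain to check the checkpoint path handed to Spark Structured Streaming. As a point of reference, here is a minimal sketch of the kind of streaming write being asserted against; `df`, `project_name`, and `query_name` are hypothetical stand-ins for the engine's real variables, while the DataStreamWriter calls themselves are standard PySpark API:

from pyspark.sql import DataFrame
from pyspark.sql.streaming import StreamingQuery


def start_stream(df: DataFrame, project_name: str, query_name: str) -> StreamingQuery:
    # The checkpoint directory mirrors the path the test expects:
    # /Projects/<project>/Resources/<query name>-checkpoint (hyphen, not underscore).
    return (
        df.writeStream.outputMode("append")
        .format("kafka")
        .option(
            "checkpointLocation",
            f"/Projects/{project_name}/Resources/{query_name}-checkpoint",
        )
        .start()
    )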
@@ -1053,7 +1053,7 @@ def test_save_stream_dataframe_query_name(self, mocker, backend_fixtures):
             mock_spark_engine_online_fg_to_avro.return_value.withColumn.return_value.writeStream.outputMode.return_value.format.return_value.option.call_args[
                 0
             ][1]
-            == "/Projects/test_project_name/Resources/test_query_name_checkpoint"
+            == "/Projects/test_project_name/Resources/test_query_name-checkpoint"
         )
         assert (
             mock_spark_engine_online_fg_to_avro.return_value.withColumn.return_value.writeStream.outputMode.return_value.format.return_value.option.return_value.options.call_args[
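The `option.call_args[0][1]` indexing used throughout these assertions is plain unittest.mock bookkeeping: `call_args[0]` is the tuple of positional arguments from the most recent call, so `[1]` is the option's value. A tiny self-contained illustration:

from unittest import mock

m = mock.MagicMock()
m.option("checkpointLocation", "/Projects/p/Resources/test_query_name-checkpoint")

# call_args[0] -> positional args of the last call; [1] -> the second argument.
assert m.option.call_args[0][1] == "/Projects/p/Resources/test_query_name-checkpoint"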
@@ -1293,7 +1293,7 @@ def test_save_stream_dataframe_await_termination(self, mocker, backend_fixtures)
             mock_spark_engine_online_fg_to_avro.return_value.withColumn.return_value.writeStream.outputMode.return_value.format.return_value.option.call_args[
                 0
             ][1]
-            == f"/Projects/test_project_name/Resources/{self._get_spark_query_name(project_id, fg)}_checkpoint"
+            == f"/Projects/test_project_name/Resources/{self._get_spark_query_name(project_id, fg)}-checkpoint"
         )
         assert (
             mock_spark_engine_online_fg_to_avro.return_value.withColumn.return_value.writeStream.outputMode.return_value.format.return_value.option.return_value.options.call_args[
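The `_await_termination` variant additionally blocks on the running query. In PySpark that is a single call on the StreamingQuery handle; this usage sketch reuses the hypothetical start_stream helper from above:

# Hypothetical usage: start the stream, then block until the query stops
# (or the timeout, in seconds, elapses).
query = start_stream(df, "test_project_name", "test_query_name")
query.awaitTermination(timeout=60)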
@@ -2456,7 +2456,7 @@ def test_time_series_split_date(self, mocker):
         d = {
             "col_0": [1, 2],
             "col_1": ["test_1", "test_2"],
-            "event_time": ["2017_03_04", "2017_03_05"],
+            "event_time": ["2017-03-04", "2017-03-05"],
         }
         df = pd.DataFrame(data=d)
@@ -2516,7 +2516,7 @@ def test_time_series_split_timestamp(self, mocker):
         d = {
             "col_0": [1, 2],
             "col_1": ["test_1", "test_2"],
-            "event_time": ["2017_03_04", "2017_03_05"],
+            "event_time": ["2017-03-04", "2017-03-05"],
         }
         df = pd.DataFrame(data=d)
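The two time-series-split fixtures above differ only in how `event_time` is later interpreted (date vs. timestamp). A minimal, self-contained sketch of an event-time split on this data, with a hypothetical cutoff (hsfs derives its own split boundaries):

import pandas as pd

d = {
    "col_0": [1, 2],
    "col_1": ["test_1", "test_2"],
    "event_time": ["2017-03-04", "2017-03-05"],
}
df = pd.DataFrame(data=d)
# Hyphenated ISO dates, as restored by this commit, parse unambiguously.
df["event_time"] = pd.to_datetime(df["event_time"])

cutoff = pd.Timestamp("2017-03-05")  # hypothetical split point
train = df[df["event_time"] < cutoff]
test = df[df["event_time"] >= cutoff]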
@@ -3809,7 +3809,7 @@ def __init__(self, label, index):
             "double": ["1"],
             "timestamp": [1641340800000],
             "boolean": ["False"],
-            "date": ["2022_01_27"],
+            "date": ["2022-01-27"],
             "binary": ["1"],
             "array<string>": [["123"]],
             "struc": [LabelIndex("0", "1")],
@@ -4212,11 +4212,11 @@ def test_setup_s3_hadoop_conf(self, mocker):
             "fs.s3a.secret.key", s3_connector.secret_key
         )
         mock_pyspark_getOrCreate.return_value.sparkContext._jsc.hadoopConfiguration.return_value.set.assert_any_call(
-            "fs.s3a.server_side_encryption_algorithm",
+            "fs.s3a.server-side-encryption-algorithm",
             s3_connector.server_encryption_algorithm,
         )
         mock_pyspark_getOrCreate.return_value.sparkContext._jsc.hadoopConfiguration.return_value.set.assert_any_call(
-            "fs.s3a.server_side_encryption_key", s3_connector.server_encryption_key
+            "fs.s3a.server-side-encryption-key", s3_connector.server_encryption_key
         )
         mock_pyspark_getOrCreate.return_value.sparkContext._jsc.hadoopConfiguration.return_value.set.assert_any_call(
             "fs.s3a.aws.credentials.provider",
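For reference, a standalone sketch of the S3A configuration these mocks verify; the credential values are placeholders, and the key names are the standard hyphenated Hadoop S3A properties this commit restores:

from pyspark.sql import SparkSession

spark = SparkSession.builder.master("local[1]").getOrCreate()
hconf = spark.sparkContext._jsc.hadoopConfiguration()

hconf.set("fs.s3a.access.key", "PLACEHOLDER_ACCESS_KEY")
hconf.set("fs.s3a.secret.key", "PLACEHOLDER_SECRET_KEY")
# S3A reads the hyphenated property names; the underscored variants
# introduced by the bad replace would simply never be consulted.
hconf.set("fs.s3a.server-side-encryption-algorithm", "SSE-KMS")
hconf.set("fs.s3a.server-side-encryption-key", "placeholder-kms-key-id")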
@@ -4514,7 +4514,7 @@ def test_setup_gcp_hadoop_conf(self, mocker):

         content = (
             '{"type": "service_account", "project_id": "test", "private_key_id": "123456", '
-            '"private_key": "_____BEGIN PRIVATE KEY_____test_____END PRIVATE KEY_____", '
+            '"private_key": "-----BEGIN PRIVATE KEY-----test-----END PRIVATE KEY-----", '
             '"client_email": "[email protected]"}'
         )
         credentialsFile = "keyFile.json"
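The fixture's `content` is an ordinary service-account JSON blob, so the restored PEM delimiters matter: `-----BEGIN PRIVATE KEY-----` (five hyphens) is the standard marker. A small sketch showing the fixture parses cleanly (the engine's actual parsing code is not part of this diff):

import json

content = (
    '{"type": "service_account", "project_id": "test", "private_key_id": "123456", '
    '"private_key": "-----BEGIN PRIVATE KEY-----test-----END PRIVATE KEY-----", '
    '"client_email": "[email protected]"}'
)
creds = json.loads(content)
# Five-dash PEM delimiters, as restored by this commit.
assert creds["private_key"].startswith("-----BEGIN PRIVATE KEY-----")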
@@ -4563,7 +4563,7 @@ def test_setup_gcp_hadoop_conf(self, mocker):
         )
         mock_pyspark_getOrCreate.return_value.sparkContext._jsc.hadoopConfiguration.return_value.set.assert_any_call(
             "fs.gs.auth.service.account.private.key",
-            "_____BEGIN PRIVATE KEY_____test_____END PRIVATE KEY_____",
+            "-----BEGIN PRIVATE KEY-----test-----END PRIVATE KEY-----",
         )
         mock_pyspark_getOrCreate.return_value.sparkContext._jsc.hadoopConfiguration.return_value.unset.assert_any_call(
             "fs.gs.encryption.algorithm"
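And a hedged sketch of the Hadoop configuration calls the GCP test expects, reusing the `creds` dict from the previous snippet. The `fs.gs.*` names are standard GCS-connector properties (which exact ones hsfs sets beyond `private.key` is an assumption here), and unsetting `fs.gs.encryption.algorithm` matches the assertion above:

from pyspark.sql import SparkSession

spark = SparkSession.builder.master("local[1]").getOrCreate()
hconf = spark.sparkContext._jsc.hadoopConfiguration()

hconf.set("fs.gs.auth.service.account.email", creds["client_email"])
hconf.set("fs.gs.auth.service.account.private.key.id", creds["private_key_id"])
hconf.set("fs.gs.auth.service.account.private.key", creds["private_key"])
# No client-side encryption configured in this test, hence the unset.
hconf.unset("fs.gs.encryption.algorithm")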
@@ -4586,7 +4586,7 @@ def test_setup_gcp_hadoop_conf_algorithm(self, mocker):

         content = (
             '{"type": "service_account", "project_id": "test", "private_key_id": "123456", '
-            '"private_key": "_____BEGIN PRIVATE KEY_____test_____END PRIVATE KEY_____", '
+            '"private_key": "-----BEGIN PRIVATE KEY-----test-----END PRIVATE KEY-----", '
             '"client_email": "[email protected]"}'
         )
         credentialsFile = "keyFile.json"
@@ -4650,7 +4650,7 @@ def test_setup_gcp_hadoop_conf_algorithm(self, mocker):
         )
         mock_pyspark_getOrCreate.return_value.sparkContext._jsc.hadoopConfiguration.return_value.set.assert_any_call(
             "fs.gs.auth.service.account.private.key",
-            "_____BEGIN PRIVATE KEY_____test_____END PRIVATE KEY_____",
+            "-----BEGIN PRIVATE KEY-----test-----END PRIVATE KEY-----",
         )

     def test_get_unique_values(self):