[pre-commit.ci] pre-commit autoupdate #2

Status: Open. Wants to merge 2 commits into base: master.
20 changes: 10 additions & 10 deletions .pre-commit-config.yaml
@@ -15,14 +15,14 @@
# specific language governing permissions and limitations
# under the License.
---
default_stages: [commit, push]
default_stages: [pre-commit, pre-push]
default_language_version:
# force all unspecified python hooks to run python3
python: python3
minimum_pre_commit_version: "1.20.0"
repos:
- repo: https://github.com/Lucas-C/pre-commit-hooks
rev: v1.1.9
rev: v1.5.5
hooks:
- id: forbid-tabs
exclude: ^docs/Makefile$|^clients/gen/go.sh
@@ -138,7 +138,7 @@ repos:
- --fuzzy-match-generates-todo
files: \.mermaid$
- repo: https://github.com/thlorenz/doctoc.git
rev: v1.4.0
rev: v2.2.0
hooks:
- id: doctoc
name: Add TOC for md files
@@ -156,14 +156,14 @@ repos:
hooks:
- id: check-hooks-apply
- repo: https://github.com/psf/black
rev: 20.8b1
rev: 24.10.0
hooks:
- id: black
files: api_connexion/.*\.py|.*providers.*\.py|^chart/tests/.*\.py
exclude: .*kubernetes_pod\.py|.*google/common/hooks/base_google\.py$
args: [--config=./pyproject.toml]
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v3.3.0
rev: v5.0.0
hooks:
- id: check-merge-conflict
- id: debug-statements
@@ -178,29 +178,29 @@
args:
- --remove
- repo: https://github.com/pre-commit/pygrep-hooks
rev: v1.6.0
rev: v1.10.0
hooks:
- id: rst-backticks
- id: python-no-log-warn
- repo: https://github.com/adrienverge/yamllint
rev: v1.25.0
rev: v1.35.1
hooks:
- id: yamllint
name: Check yaml files with yamllint
entry: yamllint -c yamllint-config.yml --strict
types: [yaml]
exclude:
^.*init_git_sync\.template\.yaml$|^.*airflow\.template\.yaml$|^chart/(?:templates|files)/.*\.yaml
- repo: https://github.com/timothycrosley/isort
rev: 5.6.4
- repo: https://github.com/PyCQA/isort
rev: 5.13.2
hooks:
- id: isort
name: Run isort to sort imports
types: [python]
# To keep consistent with the global isort skip config defined in setup.cfg
exclude: ^build/.*$|^.tox/.*$|^venv/.*$|.*api_connexion/.*\.py|.*providers.*\.py
- repo: https://github.com/pycqa/pydocstyle
rev: 5.1.1
rev: 6.3.0
hooks:
- id: pydocstyle
name: Run pydocstyle
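This PR was opened by pre-commit.ci, which runs `pre-commit autoupdate` to bump every `rev:` pin in `.pre-commit-config.yaml` to the latest tag of its hook repository; alongside the version bumps, `default_stages` moves from the older `commit`/`push` names to `pre-commit`/`pre-push`. As a rough illustration only (not pre-commit's actual implementation), the sketch below resolves the newest tag of a hook repository with `git ls-remote`; the helper name `latest_tag` and the numeric version sort are assumptions made for this example.

```python
# Illustrative sketch of resolving the newest tag for a hook repository,
# which is conceptually what an autoupdate step does for each `rev:` pin.
# Assumes `git` is on PATH and the remote is reachable.
import re
import subprocess


def latest_tag(repo_url: str) -> str:
    """Return the highest version-like tag advertised by the remote repository."""
    output = subprocess.run(
        ["git", "ls-remote", "--tags", "--refs", repo_url],
        check=True,
        capture_output=True,
        text=True,
    ).stdout
    tags = [line.split("refs/tags/", 1)[1] for line in output.splitlines() if "refs/tags/" in line]

    def version_key(tag: str):
        # Naive numeric sort: '24.10.0' -> [24, 10, 0]; good enough for a sketch.
        numbers = re.findall(r"\d+", tag)
        return [int(n) for n in numbers] if numbers else [0]

    return max(tags, key=version_key)


if __name__ == "__main__":
    print(latest_tag("https://github.com/psf/black"))  # prints the newest black tag
```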
8 changes: 5 additions & 3 deletions airflow/api_connexion/endpoints/pool_endpoint.py
@@ -88,9 +88,11 @@ def patch_pool(pool_name, session, update_mask=None):
_patch_body = {}
try:
update_mask = [
pool_schema.declared_fields[field].attribute
if pool_schema.declared_fields[field].attribute
else field
(
pool_schema.declared_fields[field].attribute
if pool_schema.declared_fields[field].attribute
else field
)
for field in update_mask
]
except KeyError as err:
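Only Black's formatting changes here: the conditional expression inside the list comprehension gains explicit parentheses. The logic itself maps each `update_mask` entry from its API field name to the schema field's `attribute`, falling back to the field name when no attribute is set. A minimal sketch of the same pattern, with a plain dict standing in for the Marshmallow schema (`FIELD_ATTRIBUTES` and `resolve_update_mask` are illustrative names, not part of the Airflow API):

```python
# API field name -> model attribute name; None means "no override".
FIELD_ATTRIBUTES = {
    "name": "pool",
    "slots": None,
    "description": None,
}


def resolve_update_mask(update_mask):
    """Translate API field names into model attribute names, as patch_pool does."""
    try:
        return [
            (FIELD_ATTRIBUTES[field] if FIELD_ATTRIBUTES[field] else field)
            for field in update_mask
        ]
    except KeyError as err:
        raise ValueError(f"Invalid field in update_mask: {err}") from None


print(resolve_update_mask(["name", "slots"]))  # ['pool', 'slots']
```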
@@ -75,7 +75,11 @@ def _remove_sample_data_from_s3():
teardown__task_remove_sample_data_from_s3 = PythonOperator(
python_callable=_remove_sample_data_from_s3, task_id='teardown__remove_sample_data_from_s3'
)
[setup__task_add_sample_data_to_s3, setup__task_create_table] >> task_transfer_s3_to_redshift >> [
teardown__task_drop_table,
teardown__task_remove_sample_data_from_s3,
]
(
[setup__task_add_sample_data_to_s3, setup__task_create_table]
>> task_transfer_s3_to_redshift
>> [
teardown__task_drop_table,
teardown__task_remove_sample_data_from_s3,
]
)
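Several example DAGs get the same treatment: a multi-line chain of `>>` dependency operators is now wrapped in a single pair of parentheses instead of relying on an open list bracket for line continuation. The dependencies that get registered are identical. A toy sketch of why the parentheses are purely cosmetic (`Task` is a stand-in for illustration, not Airflow's `BaseOperator`):

```python
class Task:
    """Toy operator that only models downstream wiring."""

    def __init__(self, name):
        self.name = name
        self.downstream = []

    def __rshift__(self, other):
        # task >> task  or  task >> [task, ...]
        targets = other if isinstance(other, list) else [other]
        self.downstream.extend(targets)
        return other

    def __rrshift__(self, others):
        # [task, ...] >> task, as in the example DAG above
        for upstream in others:
            upstream.downstream.append(self)
        return self


setup_a, setup_b, transfer, teardown_a, teardown_b = (
    Task(n) for n in ["setup_a", "setup_b", "transfer", "teardown_a", "teardown_b"]
)

# Wrapping the chain in parentheses only changes how the expression is laid out;
# `>>` is evaluated left to right either way.
(
    [setup_a, setup_b]
    >> transfer
    >> [teardown_a, teardown_b]
)

print([t.name for t in transfer.downstream])  # ['teardown_a', 'teardown_b']
```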
25 changes: 16 additions & 9 deletions airflow/providers/dingding/example_dags/example_dingding.py
@@ -205,12 +205,19 @@ def failure_callback(context):
message="",
)

[
text_msg_remind_none,
text_msg_remind_specific,
text_msg_remind_include_invalid,
text_msg_remind_all,
] >> link_msg >> markdown_msg >> [
single_action_card_msg,
multi_action_card_msg,
] >> feed_card_msg >> msg_failure_callback
(
[
text_msg_remind_none,
text_msg_remind_specific,
text_msg_remind_include_invalid,
text_msg_remind_all,
]
>> link_msg
>> markdown_msg
>> [
single_action_card_msg,
multi_action_card_msg,
]
>> feed_card_msg
>> msg_failure_callback
)
2 changes: 1 addition & 1 deletion airflow/providers/elasticsearch/log/es_task_handler.py
@@ -253,7 +253,7 @@ def set_context(self, ti: TaskInstance) -> None:
'execution_date': self._clean_execution_date(ti.execution_date),
'try_number': str(ti.try_number),
'log_id': self._render_log_id(ti, ti.try_number),
'offset': int(time() * (10 ** 9)),
'offset': int(time() * (10**9)),
},
)

@@ -168,10 +168,18 @@

create_dataset >> patch_dataset >> update_dataset >> get_dataset >> get_dataset_result >> delete_dataset

update_dataset >> create_table >> create_view >> [
get_dataset_tables,
delete_view,
] >> update_table >> delete_table >> delete_dataset
(
update_dataset
>> create_table
>> create_view
>> [
get_dataset_tables,
delete_view,
]
>> update_table
>> delete_table
>> delete_dataset
)
update_dataset >> create_external_table >> delete_dataset

with models.DAG(
2 changes: 1 addition & 1 deletion airflow/providers/google/cloud/hooks/bigquery.py
@@ -2755,7 +2755,7 @@ def _bind_parameters(operation: str, parameters: dict) -> str:
"""Helper method that binds parameters to a SQL query"""
# inspired by MySQL Python Connector (conversion.py)
string_parameters = {} # type Dict[str, str]
for (name, value) in parameters.items():
for name, value in parameters.items():
if value is None:
string_parameters[name] = 'NULL'
elif isinstance(value, str):
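Here the redundant parentheses around the `for` loop's tuple target are dropped; nothing else changes. For context, the helper substitutes named parameters into a `%(name)s`-style SQL string, emitting `NULL` for `None` and quoting strings. A standalone sketch of that binding pattern (`bind_parameters` is an illustrative name, and the quoting shown is simplified relative to the hook):

```python
def bind_parameters(operation: str, parameters: dict) -> str:
    """Interpolate named parameters into a %(name)s style SQL template."""
    string_parameters = {}
    for name, value in parameters.items():  # no parentheses needed around the tuple target
        if value is None:
            string_parameters[name] = "NULL"
        elif isinstance(value, str):
            string_parameters[name] = "'" + value.replace("'", "''") + "'"
        else:
            string_parameters[name] = str(value)
    return operation % string_parameters


print(bind_parameters("SELECT * FROM t WHERE a = %(a)s AND b = %(b)s", {"a": "x", "b": None}))
# SELECT * FROM t WHERE a = 'x' AND b = NULL
```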
4 changes: 2 additions & 2 deletions airflow/providers/google/cloud/hooks/mlengine.py
@@ -62,13 +62,13 @@ def _poll_with_exponential_delay(request, execute_num_retries, max_n, is_done_fu
log.info('Operation is done: %s', response)
return response

time.sleep((2 ** i) + (random.randint(0, 1000) / 1000))
time.sleep((2**i) + (random.randint(0, 1000) / 1000))
except HttpError as e:
if e.resp.status != 429:
log.info('Something went wrong. Not retrying: %s', format(e))
raise
else:
time.sleep((2 ** i) + (random.randint(0, 1000) / 1000))
time.sleep((2**i) + (random.randint(0, 1000) / 1000))

raise ValueError('Connection could not be established after {} retries.'.format(max_n))

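Only the spacing around the power operator changes (`2 ** i` becomes `2**i`); the surrounding function keeps polling with exponential backoff plus up to a second of random jitter, retrying only when the API answers with HTTP 429. A minimal sketch of that retry pattern (`poll_with_backoff`, `poll`, `is_done`, and `RetryableError` are illustrative stand-ins, not the Google client types used by the hook):

```python
import random
import time


class RetryableError(Exception):
    """Stand-in for an HTTP 429 response from the remote API."""


def poll_with_backoff(poll, is_done, max_attempts: int = 5):
    for i in range(max_attempts):
        try:
            response = poll()
            if is_done(response):
                return response
        except RetryableError:
            pass  # transient; fall through to the sleep below
        # Wait 2**i seconds plus up to one second of jitter, as in the hook.
        time.sleep((2**i) + (random.randint(0, 1000) / 1000))
    raise ValueError(f"Operation did not complete after {max_attempts} attempts.")


# Example: the third poll reports completion.
attempts = iter([False, False, True])
print(poll_with_backoff(lambda: next(attempts), is_done=lambda done: done))  # True
```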
8 changes: 5 additions & 3 deletions airflow/providers/mongo/hooks/mongo.py
@@ -55,9 +55,11 @@ def __init__(self, conn_id: str = 'mongo_default', *args, **kwargs) -> None:

self.uri = '{scheme}://{creds}{host}{port}/{database}'.format(
scheme=scheme,
creds='{}:{}@'.format(self.connection.login, self.connection.password)
if self.connection.login
else '',
creds=(
'{}:{}@'.format(self.connection.login, self.connection.password)
if self.connection.login
else ''
),
host=self.connection.host,
port='' if self.connection.port is None else ':{}'.format(self.connection.port),
database=self.connection.schema,
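Black wraps the conditional `creds` argument in parentheses; the behaviour is unchanged. The hook assembles a MongoDB connection URI and includes the `login:password@` segment only when a login is configured. A standalone sketch of that construction (`build_mongo_uri` is an illustrative helper, not part of `MongoHook`'s API):

```python
from typing import Optional


def build_mongo_uri(
    host: str,
    database: str,
    login: Optional[str] = None,
    password: Optional[str] = None,
    port: Optional[int] = None,
    srv: bool = False,
) -> str:
    scheme = "mongodb+srv" if srv else "mongodb"
    # Credentials segment only when a login is set, mirroring the hook's conditional.
    creds = f"{login}:{password}@" if login else ""
    port_part = "" if port is None else f":{port}"
    return f"{scheme}://{creds}{host}{port_part}/{database}"


print(build_mongo_uri("localhost", "airflow", login="user", password="secret", port=27017))
# mongodb://user:secret@localhost:27017/airflow
```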
16 changes: 10 additions & 6 deletions airflow/providers/snowflake/example_dags/example_snowflake.py
@@ -123,9 +123,13 @@

# [END howto_operator_snowflake_to_slack]

snowflake_op_sql_str >> [
snowflake_op_with_params,
snowflake_op_sql_list,
snowflake_op_template_file,
copy_into_table,
] >> slack_report
(
snowflake_op_sql_str
>> [
snowflake_op_with_params,
snowflake_op_sql_list,
snowflake_op_template_file,
copy_into_table,
]
>> slack_report
)
4 changes: 2 additions & 2 deletions tests/providers/apache/hive/transfers/test_s3_to_hive.py
@@ -212,7 +212,7 @@ def test_execute(self, mock_hiveclihook):
conn.create_bucket(Bucket='bucket')

# Testing txt, zip, bz2 files with and without header row
for (ext, has_header) in product(['.txt', '.gz', '.bz2', '.GZ'], [True, False]):
for ext, has_header in product(['.txt', '.gz', '.bz2', '.GZ'], [True, False]):
self.kwargs['headers'] = has_header
self.kwargs['check_headers'] = has_header
logging.info("Testing %s format %s header", ext, 'with' if has_header else 'without')
@@ -248,7 +248,7 @@ def test_execute_with_select_expression(self, mock_hiveclihook):
# Only testing S3ToHiveTransfer calls S3Hook.select_key with
# the right parameters and its execute method succeeds here,
# since Moto doesn't support select_object_content as of 1.3.2.
for (ext, has_header) in product(['.txt', '.gz', '.GZ'], [True, False]):
for ext, has_header in product(['.txt', '.gz', '.GZ'], [True, False]):
input_compressed = ext.lower() != '.txt'
key = self.s3_key + ext

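As in the BigQuery hook, the parentheses around the unpacked loop target are removed. For reference, `itertools.product` is what drives each test through every extension/header combination:

```python
from itertools import product

# The same combinations the test above iterates over: 4 extensions x 2 header flags.
for ext, has_header in product(['.txt', '.gz', '.bz2', '.GZ'], [True, False]):
    print(ext, has_header)
```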
2 changes: 1 addition & 1 deletion tests/providers/jenkins/hooks/test_jenkins.py
@@ -25,7 +25,7 @@
class TestJenkinsHook(unittest.TestCase):
@mock.patch('airflow.hooks.base_hook.BaseHook.get_connection')
def test_client_created_default_http(self, get_connection_mock):
"""tests `init` method to validate http client creation when all parameters are passed """
"""tests `init` method to validate http client creation when all parameters are passed"""
default_connection_id = 'jenkins_default'

connection_host = 'http://test.com'
12 changes: 6 additions & 6 deletions tests/providers/slack/hooks/test_slack.py
@@ -27,7 +27,7 @@

class TestSlackHook(unittest.TestCase):
def test_get_token_with_token_only(self):
"""tests `__get_token` method when only token is provided """
"""tests `__get_token` method when only token is provided"""
# Given
test_token = 'test_token'
test_conn_id = None
@@ -43,7 +43,7 @@ def test_get_token_with_token_only(self):
@mock.patch('airflow.providers.slack.hooks.slack.WebClient')
@mock.patch('airflow.providers.slack.hooks.slack.SlackHook.get_connection')
def test_get_token_with_valid_slack_conn_id_only(self, get_connection_mock, mock_slack_client):
"""tests `__get_token` method when only connection is provided """
"""tests `__get_token` method when only connection is provided"""
# Given
test_token = None
test_conn_id = 'x'
@@ -63,7 +63,7 @@ def test_get_token_with_valid_slack_conn_id_only(self, get_connection_mock, mock

@mock.patch('airflow.providers.slack.hooks.slack.SlackHook.get_connection')
def test_get_token_with_no_password_slack_conn_id_only(self, get_connection_mock):
"""tests `__get_token` method when only connection is provided """
"""tests `__get_token` method when only connection is provided"""

# Mock
conn = mock.Mock()
@@ -75,7 +75,7 @@ def test_get_token_with_no_password_slack_conn_id_only(self, get_connection_mock

@mock.patch('airflow.providers.slack.hooks.slack.SlackHook.get_connection')
def test_get_token_with_empty_password_slack_conn_id_only(self, get_connection_mock):
"""tests `__get_token` method when only connection is provided """
"""tests `__get_token` method when only connection is provided"""

# Mock
get_connection_mock.return_value = mock.Mock(password=None)
@@ -84,7 +84,7 @@ def test_get_token_with_empty_password_slack_conn_id_only(self, get_connection_m
self.assertRaises(AirflowException, SlackHook, token=None, slack_conn_id='x')

def test_get_token_with_token_and_slack_conn_id(self):
"""tests `__get_token` method when both arguments are provided """
"""tests `__get_token` method when both arguments are provided"""
# Given
test_token = 'test_token'
test_conn_id = 'x'
@@ -98,7 +98,7 @@ def test_get_token_with_token_and_slack_conn_id(self):
self.assertEqual(output, expected)

def test_get_token_with_out_token_nor_slack_conn_id(self):
"""tests `__get_token` method when no arguments are provided """
"""tests `__get_token` method when no arguments are provided"""

self.assertRaises(AirflowException, SlackHook, token=None, slack_conn_id=None)

3 changes: 1 addition & 2 deletions tests/providers/yandex/hooks/test_yandex.py
@@ -27,7 +27,7 @@ class TestYandexHook(unittest.TestCase):
@mock.patch('airflow.hooks.base_hook.BaseHook.get_connection')
@mock.patch('airflow.providers.yandex.hooks.yandex.YandexCloudBaseHook._get_credentials')
def test_client_created_without_exceptions(self, get_credentials_mock, get_connection_mock):
"""tests `init` method to validate client creation when all parameters are passed """
"""tests `init` method to validate client creation when all parameters are passed"""

# Inputs to constructor
default_folder_id = 'test_id'
@@ -46,7 +46,6 @@ def test_client_created_without_exceptions(self, get_credentials_mock, get_conne

@mock.patch('airflow.hooks.base_hook.BaseHook.get_connection')
def test_get_credentials_raise_exception(self, get_connection_mock):

"""tests 'get_credentials' method raising exception if none of the required fields are passed."""

# Inputs to constructor