Commit
Merge branch 'master' into brevo-assets-v1.0.0
apiazza-dd authored Dec 19, 2024
2 parents 72d7954 + 3988f6a commit 4272678
Showing 115 changed files with 9,594 additions and 171 deletions.
18 changes: 7 additions & 11 deletions .builders/deps/build_dependencies.txt
@@ -1,13 +1,9 @@
hatchling==1.21.1; python_version > '3.0'
hatchling==0.25.1; python_version < '3.0'
setuptools==75.6.0; python_version > '3.0'
setuptools==40.9.0; python_version < '3.0'
wheel==0.38.4; python_version > '3.0'
wheel==0.37.1; python_version < '3.0'
setuptools-scm; python_version > '3.0'
setuptools-scm==5.0.2; python_version < '3.0'
setuptools-rust>=1.7.0; python_version > '3.0'
maturin; python_version > '3.0'
hatchling==1.21.1
setuptools==75.6.0
wheel==0.38.4
setuptools-scm
setuptools-rust>=1.7.0
maturin
cffi>=1.12
cython==3.0.11
tomli>=2.0.1; python_version > '3.0'
tomli>=2.0.1
9 changes: 9 additions & 0 deletions .codecov.yml
@@ -498,6 +498,10 @@ coverage:
target: 75
flags:
- pulsar
Quarkus:
target: 75
flags:
- quarkus
RabbitMQ:
target: 75
flags:
@@ -1373,6 +1377,11 @@ flags:
paths:
- pulsar/datadog_checks/pulsar
- pulsar/tests
quarkus:
carryforward: true
paths:
- quarkus/datadog_checks/quarkus
- quarkus/tests
rabbitmq:
carryforward: true
paths:
6 changes: 3 additions & 3 deletions .deps/image_digests.json
@@ -1,5 +1,5 @@
{
"linux-aarch64": "sha256:e09d2c24c54286a660b8fab396f8a69dc58907551dda5623cb69212e29565019",
"linux-x86_64": "sha256:e5b6ea5078b388bfd3f6e99d3573bbd4d4491020613de23f6fc23d535f14387f",
"windows-x86_64": "sha256:d6fbd8983205cb9af2d17ee47110e985572e8d47617a1cb5a0cbbbfeb2e30292"
"linux-aarch64": "sha256:03314aedd5b8a67258d476984629004e52f7299123897a83fd2aee8c13a7995a",
"linux-x86_64": "sha256:01a85cfe9b017760a3d485e5e2d01ef263f807797cb8b0d6ce10a9ed76a2026b",
"windows-x86_64": "sha256:869dd119f9b08b08cd21abeec3ae5b2b4c8967dc6ae699c0503ffe1e50bd939b"
}
8 changes: 4 additions & 4 deletions .deps/resolved/macos-x86_64_3.12.txt
@@ -18,7 +18,7 @@ charset-normalizer @ https://agent-int-packages.datadoghq.com/external/charset-n
clickhouse-cityhash @ https://agent-int-packages.datadoghq.com/external/clickhouse-cityhash/clickhouse_cityhash-1.0.2.4-cp312-cp312-macosx_10_9_x86_64.whl#sha256=261fc1b0bf349de66b2d9e3d367879a561b516ca8e54e85e0c27b7c1a4f639b4
clickhouse-driver @ https://agent-int-packages.datadoghq.com/external/clickhouse-driver/clickhouse_driver-0.2.9-cp312-cp312-macosx_10_9_x86_64.whl#sha256=fcb2fd00e58650ae206a6d5dbc83117240e622471aa5124733fbf2805eb8bda0
cm-client @ https://agent-int-packages.datadoghq.com/built/cm-client/cm_client-45.0.4-20241216144620-py3-none-macosx_10_12_universal2.whl#sha256=72f55306e2e3df9291ee55e3a6b2f6698fe3999db9570a14da0ea56bbf51e5a9
confluent-kafka @ https://agent-int-packages.datadoghq.com/built/confluent-kafka/confluent_kafka-2.6.1-20241216144621-cp312-cp312-macosx_10_13_universal2.whl#sha256=5c740bdec13e86f43e17f6e2c557f4bb01ca7d6b3177258e174f390e930cdac9
confluent-kafka @ https://agent-int-packages.datadoghq.com/built/confluent-kafka/confluent_kafka-2.6.1-20241217110206-cp312-cp312-macosx_10_13_universal2.whl#sha256=88ae1ef90bd70a5b83c7890890ebc736dc02e9246a01b2e21840ffd69eac31dd
cryptography @ https://agent-int-packages.datadoghq.com/external/cryptography/cryptography-43.0.1-cp39-abi3-macosx_10_9_universal2.whl#sha256=ac119bb76b9faa00f48128b7f5679e1d8d437365c5d26f1c2c3f0da4ce1b553d
ddsketch @ https://agent-int-packages.datadoghq.com/external/ddsketch/ddsketch-3.0.1-py3-none-any.whl#sha256=6d047b455fe2837c43d366ff1ae6ba0c3166e15499de8688437a75cea914224e
ddtrace @ https://agent-int-packages.datadoghq.com/external/ddtrace/ddtrace-2.10.6-cp312-cp312-macosx_12_0_x86_64.whl#sha256=401f77b0564c3f990b58b9f21055331ca9efcdfa06dfa6ccff13cf21f8329ba5
@@ -50,7 +50,7 @@ lz4 @ https://agent-int-packages.datadoghq.com/external/lz4/lz4-4.3.3-cp312-cp31
mmh3 @ https://agent-int-packages.datadoghq.com/external/mmh3/mmh3-4.1.0-cp312-cp312-macosx_10_9_x86_64.whl#sha256=d6af3e2287644b2b08b5924ed3a88c97b87b44ad08e79ca9f93d3470a54a41c5
msal @ https://agent-int-packages.datadoghq.com/external/msal/msal-1.31.1-py3-none-any.whl#sha256=29d9882de247e96db01386496d59f29035e5e841bcac892e6d7bf4390bf6bd17
msal-extensions @ https://agent-int-packages.datadoghq.com/external/msal-extensions/msal_extensions-1.2.0-py3-none-any.whl#sha256=cf5ba83a2113fa6dc011a254a72f1c223c88d7dfad74cc30617c4679a417704d
netifaces @ https://agent-int-packages.datadoghq.com/built/netifaces/netifaces-0.11.0-20241216144621-cp312-cp312-macosx_10_13_universal2.whl#sha256=a0eb73c6cfb7f780f4a6a5eee55883d134abe5cb586aec14ea60be9a0992f7d5
netifaces @ https://agent-int-packages.datadoghq.com/built/netifaces/netifaces-0.11.0-20241217110207-cp312-cp312-macosx_10_13_universal2.whl#sha256=b9bed2e4521f4546495e04d8f2b95085a9c83e659ec5785a084e8659f3b57889
oauthlib @ https://agent-int-packages.datadoghq.com/external/oauthlib/oauthlib-3.2.2-py3-none-any.whl#sha256=8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca
openstacksdk @ https://agent-int-packages.datadoghq.com/external/openstacksdk/openstacksdk-3.3.0-py3-none-any.whl#sha256=e6d4121b87354984caf0e3c032e2ebf4d4440374f86c81c27ec52ca5df359157
opentelemetry-api @ https://agent-int-packages.datadoghq.com/external/opentelemetry-api/opentelemetry_api-1.29.0-py3-none-any.whl#sha256=5fcd94c4141cc49c736271f3e1efb777bebe9cc535759c54c936cca4f1b312b8
@@ -76,7 +76,7 @@ pydantic @ https://agent-int-packages.datadoghq.com/external/pydantic/pydantic-2
pydantic-core @ https://agent-int-packages.datadoghq.com/external/pydantic-core/pydantic_core-2.20.1-cp312-cp312-macosx_10_12_x86_64.whl#sha256=595ba5be69b35777474fa07f80fc260ea71255656191adb22a8c53aba4479231
pyjwt @ https://agent-int-packages.datadoghq.com/external/pyjwt/PyJWT-2.9.0-py3-none-any.whl#sha256=3b02fb0f44517787776cf48f2ae25d8e14f300e6d7545a4315cee571a415e850
pymongo @ https://agent-int-packages.datadoghq.com/external/pymongo/pymongo-4.8.0-cp312-cp312-macosx_10_9_x86_64.whl#sha256=e6a720a3d22b54183352dc65f08cd1547204d263e0651b213a0a2e577e838526
pymqi @ https://agent-int-packages.datadoghq.com/built/pymqi/pymqi-1.12.10-20241216144622-cp312-cp312-macosx_10_13_universal2.whl#sha256=eb15c1c967739474579c72f669d747ff2bd1b037d856ce121684385ef4a64c64
pymqi @ https://agent-int-packages.datadoghq.com/built/pymqi/pymqi-1.12.10-20241217110207-cp312-cp312-macosx_10_13_universal2.whl#sha256=8e35a9b5f25877bbc08c2c17491e239780f10009a00c991d7099d9fbb8699e7d
pymysql @ https://agent-int-packages.datadoghq.com/external/pymysql/PyMySQL-1.1.1-py3-none-any.whl#sha256=4de15da4c61dc132f4fb9ab763063e693d521a80fd0e87943b9a453dd4c19d6c
pynacl @ https://agent-int-packages.datadoghq.com/external/pynacl/PyNaCl-1.5.0-cp36-abi3-macosx_10_10_universal2.whl#sha256=401002a4aaa07c9414132aaed7f6836ff98f59277a234704ff66878c2ee4a0d1
pyodbc @ https://agent-int-packages.datadoghq.com/external/pyodbc/pyodbc-5.1.0-cp312-cp312-macosx_10_9_x86_64.whl#sha256=d3d9cc4af703c4817b6e604315910b0cf5dcb68056d52b25ca072dd59c52dcbc
@@ -120,7 +120,7 @@ tuf @ https://agent-int-packages.datadoghq.com/external/tuf/tuf-4.0.0-py3-none-a
typing-extensions @ https://agent-int-packages.datadoghq.com/external/typing-extensions/typing_extensions-4.12.2-py3-none-any.whl#sha256=04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d
tzlocal @ https://agent-int-packages.datadoghq.com/external/tzlocal/tzlocal-5.2-py3-none-any.whl#sha256=49816ef2fe65ea8ac19d19aa7a1ae0551c834303d5014c6d5a62e4cbda8047b8
uhashring @ https://agent-int-packages.datadoghq.com/external/uhashring/uhashring-2.3-py3-none-any.whl#sha256=7ee8a25ca495a97effad10bd563c83b4054a6d7606d9530757049a04edab9297
uptime @ https://agent-int-packages.datadoghq.com/built/uptime/uptime-3.0.1-20241216144622-cp312-cp312-macosx_10_13_universal2.whl#sha256=e7dc4c6f3718f59260348f0a94b62d975e7957ffecef6db06df4a9f2f21327be
uptime @ https://agent-int-packages.datadoghq.com/built/uptime/uptime-3.0.1-20241217110208-cp312-cp312-macosx_10_13_universal2.whl#sha256=f58a728a54278b54a114f7d4afb5864e629cf7c4afd070f5468bb9fd6d118653
urllib3 @ https://agent-int-packages.datadoghq.com/external/urllib3/urllib3-2.2.3-py3-none-any.whl#sha256=ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac
vertica-python @ https://agent-int-packages.datadoghq.com/external/vertica-python/vertica_python-1.4.0-py3-none-any.whl#sha256=50fecd7687f4b0b9f6dee6e2b35c195af2a4f702ece01bd12e080b51756e000b
websocket-client @ https://agent-int-packages.datadoghq.com/external/websocket-client/websocket_client-1.8.0-py3-none-any.whl#sha256=17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526
2 changes: 1 addition & 1 deletion .deps/resolved/windows-x86_64_3.12.txt
@@ -18,7 +18,7 @@ charset-normalizer @ https://agent-int-packages.datadoghq.com/external/charset-n
clickhouse-cityhash @ https://agent-int-packages.datadoghq.com/external/clickhouse-cityhash/clickhouse_cityhash-1.0.2.4-cp312-cp312-win_amd64.whl#sha256=0409917be29f5ad80a6772712fce954b5e81450555636e8523290ee9740a2dbb
clickhouse-driver @ https://agent-int-packages.datadoghq.com/external/clickhouse-driver/clickhouse_driver-0.2.9-cp312-cp312-win_amd64.whl#sha256=de6624e28eeffd01668803d28ae89e3d4e359b1bff8b60e4933e1cb3c6f86f18
cm-client @ https://agent-int-packages.datadoghq.com/built/cm-client/cm_client-45.0.4-20240402154627-py3-none-win_amd64.whl#sha256=1743b32a221d2a0804b4e425ffd53468e8f1754da217fe1e7bd9ff7800fd90f8
confluent-kafka @ https://agent-int-packages.datadoghq.com/built/confluent-kafka/confluent_kafka-2.6.1-20241216144609-cp312-cp312-win_amd64.whl#sha256=799b27682db545bfb695a9311319177eb08d4b2c4b60addfb13f4c8993f6b29b
confluent-kafka @ https://agent-int-packages.datadoghq.com/built/confluent-kafka/confluent_kafka-2.6.1-20241217110200-cp312-cp312-win_amd64.whl#sha256=d01ab115eeacd12a46bb6a0257d3795659410950359c9b8b41ed5a506d7a00d7
cryptography @ https://agent-int-packages.datadoghq.com/external/cryptography/cryptography-43.0.1-cp39-abi3-win_amd64.whl#sha256=d75601ad10b059ec832e78823b348bfa1a59f6b8d545db3a24fd44362a1564cb
ddsketch @ https://agent-int-packages.datadoghq.com/external/ddsketch/ddsketch-3.0.1-py3-none-any.whl#sha256=6d047b455fe2837c43d366ff1ae6ba0c3166e15499de8688437a75cea914224e
ddtrace @ https://agent-int-packages.datadoghq.com/external/ddtrace/ddtrace-2.10.6-cp312-cp312-win_amd64.whl#sha256=bb183a535e5b24828a45901babd9fd15a1350c9d5096de5ba463287d0c8c64d1
5 changes: 5 additions & 0 deletions .github/CODEOWNERS
@@ -372,6 +372,11 @@ datadog_checks_base/datadog_checks/base/checks/windows/ @DataDog/wi
/temporal_cloud/manifest.json @DataDog/saas-integrations @DataDog/documentation
/temporal_cloud/metadata.csv @DataDog/saas-integrations @DataDog/documentation

/temporal_cloud/ @DataDog/saas-integrations
/temporal_cloud/*.md @DataDog/saas-integrations @DataDog/documentation
/temporal_cloud/manifest.json @DataDog/saas-integrations @DataDog/documentation
/temporal_cloud/metadata.csv @DataDog/saas-integrations @DataDog/documentation

/trend_micro_email_security/ @DataDog/saas-integrations
/trend_micro_email_security/*.md @DataDog/saas-integrations @DataDog/documentation
/trend_micro_email_security/manifest.json @DataDog/saas-integrations @DataDog/documentation
2 changes: 2 additions & 0 deletions .github/workflows/config/labeler.yml
@@ -441,6 +441,8 @@ integration/proxysql:
- proxysql/**/*
integration/pulsar:
- pulsar/**/*
integration/quarkus:
- quarkus/**/*
integration/rabbitmq:
- rabbitmq/**/*
integration/ray:
20 changes: 20 additions & 0 deletions .github/workflows/test-all.yml
@@ -2994,6 +2994,26 @@ jobs:
minimum-base-package: ${{ inputs.minimum-base-package }}
pytest-args: ${{ inputs.pytest-args }}
secrets: inherit
jcc156e5:
uses: ./.github/workflows/test-target.yml
with:
job-name: Quarkus
target: quarkus
platform: linux
runner: '["ubuntu-22.04"]'
repo: "${{ inputs.repo }}"
python-version: "${{ inputs.python-version }}"
standard: ${{ inputs.standard }}
latest: ${{ inputs.latest }}
agent-image: "${{ inputs.agent-image }}"
agent-image-py2: "${{ inputs.agent-image-py2 }}"
agent-image-windows: "${{ inputs.agent-image-windows }}"
agent-image-windows-py2: "${{ inputs.agent-image-windows-py2 }}"
test-py2: ${{ inputs.test-py2 }}
test-py3: ${{ inputs.test-py3 }}
minimum-base-package: ${{ inputs.minimum-base-package }}
pytest-args: ${{ inputs.pytest-args }}
secrets: inherit
j694032b:
uses: ./.github/workflows/test-target.yml
with:
1 change: 0 additions & 1 deletion airflow/README.md
@@ -126,7 +126,6 @@ Connect Airflow to DogStatsD (included in the Datadog Agent) by using the Airflo
tags:
dag_id: "$1"
task_id: "$2"
- match: "airflow.pool.open_slots.*"
- match: "airflow.dagrun.*.first_task_scheduling_delay"
name: "airflow.dagrun.first_task_scheduling_delay"
tags:
6 changes: 6 additions & 0 deletions airflow/assets/configuration/spec.yaml
@@ -13,6 +13,12 @@ files:
description: The URL used to connect to the Airflow instance (use the Airflow web server REST API endpoint).
value:
type: string
- name: collect_ongoing_duration
required: false
description: Collect ongoing duration metric for DAG task instances.
value:
type: boolean
example: true
- template: instances/http
- template: instances/default
- template: logs
1 change: 1 addition & 0 deletions airflow/changelog.d/19278.added
@@ -0,0 +1 @@
Use `start_date` instead of `execution_date` for ongoing duration metrics
8 changes: 4 additions & 4 deletions airflow/datadog_checks/airflow/airflow.py
@@ -19,7 +19,7 @@ def __init__(self, name, init_config, instances):

self._url = self.instance.get('url', '')
self._tags = self.instance.get('tags', [])

self._collect_ongoing_duration = self.instance.get('collect_ongoing_duration', True)
# The Agent only makes one attempt to instantiate each AgentCheck so any errors occurring
# in `__init__` are logged just once, making it difficult to spot. Therefore, we emit
# potential configuration errors as part of the check run phase.
@@ -51,7 +51,7 @@ def check(self, _):
else:
submit_metrics(resp, tags)
# Only calculate task duration for stable API
if target_url is url_stable:
if target_url is url_stable and self._collect_ongoing_duration:
task_instances = self._get_all_task_instances(url_stable_task_instances, tags)
if task_instances:
self._calculate_task_ongoing_duration(task_instances, tags)
@@ -118,14 +118,14 @@ def _calculate_task_ongoing_duration(self, tasks, tags):
dag_task_tags = copy(tags)
task_id = task.get('task_id')
dag_id = task.get('dag_id')
execution_date = task.get('execution_date')
start_date = task.get('start_date')

# Add tags for each task
dag_task_tags.append('dag_id:{}'.format(dag_id))
dag_task_tags.append('task_id:{}'.format(task_id))

# Calculate ongoing duration
ongoing_duration = get_timestamp() - datetime.fromisoformat((execution_date)).timestamp()
ongoing_duration = get_timestamp() - datetime.fromisoformat((start_date)).timestamp()
self.gauge('airflow.dag.task.ongoing_duration', ongoing_duration, tags=dag_task_tags)

def _parse_config(self):
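For reference, a minimal standalone sketch of the ongoing-duration calculation that this change switches to `start_date`. The actual check uses its own `get_timestamp` helper and submits the result via `self.gauge`; the sample task-instance payload below is hypothetical.

```python
# Sketch only: mirrors the updated calculation in airflow.py using the standard
# library instead of the Agent's get_timestamp helper.
from datetime import datetime, timezone


def ongoing_duration_seconds(task_instance: dict) -> float:
    """Seconds elapsed since a running task instance started."""
    start_date = task_instance["start_date"]  # ISO 8601 string from the stable REST API
    now = datetime.now(timezone.utc).timestamp()
    return now - datetime.fromisoformat(start_date).timestamp()


if __name__ == "__main__":
    # Hypothetical running task instance
    sample = {"dag_id": "tutorial", "task_id": "sleep", "start_date": "2024-12-19T10:00:00+00:00"}
    print(ongoing_duration_seconds(sample))
```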
4 changes: 4 additions & 0 deletions airflow/datadog_checks/airflow/config_models/defaults.py
@@ -24,6 +24,10 @@ def instance_auth_type():
return 'basic'


def instance_collect_ongoing_duration():
return True


def instance_disable_generic_tags():
return False

1 change: 1 addition & 0 deletions airflow/datadog_checks/airflow/config_models/instance.py
@@ -60,6 +60,7 @@ class InstanceConfig(BaseModel):
aws_host: Optional[str] = None
aws_region: Optional[str] = None
aws_service: Optional[str] = None
collect_ongoing_duration: Optional[bool] = None
connect_timeout: Optional[float] = None
disable_generic_tags: Optional[bool] = None
empty_default_hostname: Optional[bool] = None
5 changes: 5 additions & 0 deletions airflow/datadog_checks/airflow/data/conf.yaml.example
@@ -50,6 +50,11 @@ instances:
#
- url: <URL>

## @param collect_ongoing_duration - boolean - optional - default: true
## Collect ongoing duration metric for DAG task instances.
#
# collect_ongoing_duration: true

## @param proxy - mapping - optional
## This overrides the `proxy` setting in `init_config`.
##
41 changes: 41 additions & 0 deletions airflow/tests/test_unit.py
@@ -118,3 +118,44 @@ def test_dag_task_ongoing_duration(aggregator, task_instance):
tags=['key:my-tag', 'url:http://localhost:8080', 'dag_id:tutorial', 'task_id:sleep'],
count=1,
)


@pytest.mark.parametrize(
"collect_ongoing_duration, should_call_method",
[
pytest.param(
True,
[
mock.call(
'http://localhost:8080/api/v1/dags/~/dagRuns/~/taskInstances?state=running',
['url:http://localhost:8080', 'key:my-tag'],
)
],
id="collect",
),
pytest.param(
False,
[],
id="don't collect",
),
],
)
def test_config_collect_ongoing_duration(collect_ongoing_duration, should_call_method):
instance = {**common.FULL_CONFIG['instances'][0], 'collect_ongoing_duration': collect_ongoing_duration}
check = AirflowCheck('airflow', common.FULL_CONFIG, [instance])

with mock.patch('datadog_checks.airflow.airflow.AirflowCheck._get_version', return_value='2.6.2'):
with mock.patch('datadog_checks.base.utils.http.requests') as req:
mock_resp = mock.MagicMock(status_code=200)
mock_resp.json.side_effect = [
{'metadatabase': {'status': 'healthy'}, 'scheduler': {'status': 'healthy'}},
]
req.get.return_value = mock_resp

with mock.patch(
'datadog_checks.airflow.airflow.AirflowCheck._get_all_task_instances'
) as mock_get_all_task_instances:
check.check(None)

# Assert method calls
mock_get_all_task_instances.assert_has_calls(should_call_method, any_order=False)