Merge branch 'develop' into dependabot/pip/connectors/aws/urllib3-1.26.19
DerekRushton authored Sep 17, 2024
2 parents a9408dc + 975291e commit b5a880c
Showing 20 changed files with 146 additions and 29 deletions.
8 changes: 8 additions & 0 deletions .contrast-scan.json
@@ -0,0 +1,8 @@
{
"excludes": [
"**/automation_tests/**",
"**/tests/**",
"**/test/**",
"**/reference_connector/**"
]
}
19 changes: 19 additions & 0 deletions catalog-info.yaml
@@ -0,0 +1,19 @@
---
apiVersion: backstage.io/v1alpha1
kind: Component
metadata:
name: 'cp4s-car-connectors'
description: Core open-source library and connectors used by CAR
tags:
- stix
- python
- interoperability
annotations:
github.com/project-slug: IBM/cp4s-car-connectors
jira/component: "Connector Factory"
backstage.io/techdocs-ref: dir:.
spec:
type: library
lifecycle: production
owner: connectorfactory
---
3 changes: 1 addition & 2 deletions connectors/atp/requirements.txt
@@ -1,4 +1,3 @@
adal==1.2.2
car-connector-framework==4.0.1
urllib3==1.26.18
car-connector-framework==4.0.2
python-dateutil==2.8.2
3 changes: 1 addition & 2 deletions connectors/aws/requirements.txt
@@ -1,5 +1,4 @@
boto3==1.24.83
adal==1.2.2
car-connector-framework==4.0.1
urllib3==1.26.19
car-connector-framework==4.0.2
python-dateutil==2.8.2
4 changes: 1 addition & 3 deletions connectors/azure/requirements.txt
@@ -1,4 +1,2 @@
adal==1.2.6
car-connector-framework==4.0.1
urllib3==1.26.18

car-connector-framework==4.0.2
10 changes: 10 additions & 0 deletions connectors/crowdstrike/connector/data_handler.py
@@ -1,3 +1,4 @@

import datetime

from car_framework.context import context
@@ -52,6 +53,15 @@ def group_host_sensor_apps(applications):
group_apps[agent_id].append(app)
return group_apps

def remove_duplicates(items):
"""Remove any duplicate hosts from the array of hosts"""
context().logger.debug("Removing duplicates from list with length: %s", len(items))
unique_items = []
for item in items:
if item not in unique_items:
unique_items.append(item)
context().logger.debug("Returning %s unique items", len(unique_items))
return unique_items

class DataHandler(BaseDataHandler):

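The remove_duplicates helper above performs a linear membership test for every item, which is O(n^2) overall. Host records are dicts and therefore unhashable, but assuming they are JSON-serializable, an order-preserving variant with near-linear cost is possible. A minimal sketch; remove_duplicates_fast is a hypothetical name, not part of the connector:

import json

def remove_duplicates_fast(items):
    """Order-preserving dedup keyed on a canonical JSON form of each record."""
    seen = set()
    unique_items = []
    for item in items:
        key = json.dumps(item, sort_keys=True)  # assumes JSON-serializable records
        if key not in seen:
            seen.add(key)
            unique_items.append(item)
    return unique_items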
87 changes: 81 additions & 6 deletions connectors/crowdstrike/connector/server_access.py
@@ -2,7 +2,7 @@

from car_framework.context import context
from car_framework.util import DatasourceFailure
from connector.data_handler import epoch_to_datetime_conv, deep_get
from connector.data_handler import epoch_to_datetime_conv, deep_get, remove_duplicates
from falconpy import Discover
from falconpy import SpotlightVulnerabilities

@@ -42,13 +42,19 @@ def get_hosts(self, last_model_state_id=None):
returns:
hosts(list): host list
"""
context().logger.info("Beginning import of hosts from the CrowdStrike Falcon environment.")
hosts = []
try:
discover = Discover(client_id=self.client_id, client_secret=self.client_secret, base_url=self.base_url)
offset = 0
result_filter = ""
sort = "last_seen_timestamp|asc"
if last_model_state_id:
result_filter = f"last_seen_timestamp:>'{epoch_to_datetime_conv(last_model_state_id)}'"
host_lookup = discover.query_hosts(filter=result_filter, limit=DISCOVER_PAGE_SIZE)
context().logger.debug("Beginning hosts query using filter: %s", result_filter)
host_lookup = discover.query_hosts(filter=result_filter, limit=DISCOVER_PAGE_SIZE, sort=sort)
total = host_lookup['body']['meta']['pagination']['total']
context().logger.debug("Completed hosts query. Total hosts = %s", total)
is_paged_result = True
while is_paged_result:
if host_lookup["status_code"] == 200:
@@ -59,13 +65,41 @@ def get_hosts(self, last_model_state_id=None):
error_detail = host_lookup["body"]["errors"]
for err in error_detail:
raise DatasourceFailure(err["message"], err["code"])
if len(hosts) < host_lookup['body']['meta']['pagination']['total']:
offset = len(hosts)
host_lookup = discover.query_hosts(filter=result_filter, limit=DISCOVER_PAGE_SIZE, offset=offset)

if len(host_detail["body"]["resources"]) == DISCOVER_PAGE_SIZE:
'''CrowdStrike's Discover endpoint has the following limitation: limit + offset <= 10,000.
To collect all hosts past that cap, we make additional queries that filter on the
last_seen_timestamp instead of increasing the offset. Those extra queries can pick up
duplicate hosts, so we remove the duplicates afterwards.'''
if DISCOVER_PAGE_SIZE + offset > 10000:
offset = 0
last_added = deep_get(hosts[-1], ['last_seen_timestamp'])
last_added_filter = "last_seen_timestamp:>='" + last_added + "'"
if result_filter == "":
updated_filter = last_added_filter
else:
updated_filter = result_filter + " + " + last_added_filter

context().logger.debug("The limit + offset is greater then 10000. Making additional hosts query using filter: %s", updated_filter)
host_lookup = discover.query_hosts(filter=updated_filter, limit=DISCOVER_PAGE_SIZE, sort=sort)
context().logger.debug("Hosts query using updated filter is complete.")
else:
offset = len(hosts)
context().logger.debug("Beginning hosts query using offset of %s", offset)
host_lookup = discover.query_hosts(filter=result_filter, limit=DISCOVER_PAGE_SIZE, offset=offset, sort=sort)
context().logger.debug("Hosts query using offset complete")
else:
is_paged_result = False

if len(hosts) > total:
# Remove potential duplicates introduced by the additional queries made when the total number of hosts is >10,000
context().logger.info("Removing duplicate hosts added during import.")
hosts = remove_duplicates(hosts)

except Exception as ex:
raise DatasourceFailure(ex)
raise DatasourceFailure(ex)

context().logger.info("Imported %s hosts from the CrowdStrike Falcon environment.", len(hosts))
return hosts

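The get_hosts changes above work around a Discover API constraint (limit + offset must stay at or below 10,000) by switching from offset paging to a keyset-style filter on last_seen_timestamp once the window is exhausted. A minimal sketch of the pattern, assuming results are sorted by last_seen_timestamp ascending; query_page(filter_, offset) is a hypothetical stand-in for discover.query_hosts:

MAX_WINDOW = 10000  # API cap on limit + offset
PAGE_SIZE = 100

def fetch_all(query_page):
    items = []
    filter_ = ""
    offset = 0
    while True:
        page = query_page(filter_, offset)  # hypothetical call returning a list of hosts
        if not page:
            break
        items.extend(page)
        if len(page) < PAGE_SIZE:
            break  # final, partial page
        if offset + 2 * PAGE_SIZE > MAX_WINDOW:
            # The next offset request would exceed the cap: restart the window,
            # filtering on the newest timestamp collected so far.
            filter_ = f"last_seen_timestamp:>='{items[-1]['last_seen_timestamp']}'"
            offset = 0
        else:
            offset += PAGE_SIZE
    return items

Because the boundary filter uses >=, the last record of one window reappears at the start of the next, which is why get_hosts finishes by calling remove_duplicates.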
def get_applications(self, app_filter=None, last_model_state_id=None):
@@ -76,6 +110,9 @@
returns:
applications(list): application list
"""
if app_filter is None:
# This log message is inside an if statement because the method can be called thousands of times when app_filter is in use, which would add noise to the logs.
context().logger.info("Beginning import of applications from the CrowdStrike Falcon environment.")
applications = []
try:
discover = Discover(client_id=self.client_id, client_secret=self.client_secret, base_url=self.base_url)
@@ -86,7 +123,10 @@
installed_app_filter = f"last_updated_timestamp:>'{epoch_to_datetime_conv(last_model_state_id)}'"
used_app_filter = f"last_used_timestamp:>'{epoch_to_datetime_conv(last_model_state_id)}'"
result_filter = f"{installed_app_filter},{used_app_filter}"
context().logger.debug("Beginning applications query using filter: %s", result_filter)
app_lookup = discover.query_applications(filter=result_filter, limit=DISCOVER_PAGE_SIZE)
total = app_lookup['body']['meta']['pagination']['total']
context().logger.debug("Completed applications query. Total applications = %s", total)
is_paged_result = True
while is_paged_result:
if app_lookup["status_code"] == 200:
@@ -99,12 +139,19 @@
raise DatasourceFailure(err["message"], err["code"])
if len(applications) < app_lookup['body']['meta']['pagination']['total']:
offset = len(applications)
context().logger.debug("Beginning applications query using offset of %s", offset)
app_lookup = discover.query_applications(filter=result_filter, limit=DISCOVER_PAGE_SIZE,
offset=offset)
context().logger.debug("Applications query using offset complete")
else:
is_paged_result = False
except Exception as ex:
raise DatasourceFailure(ex)

if app_filter is None:
# This log message is inside an if statement because the method can be called thousands of times when app_filter is in use, which would add noise to the logs.
context().logger.info("Imported %s applications from the CrowdStrike Falcon environment.", len(applications))

return applications

def get_accounts(self, last_model_state_id=None):
@@ -115,6 +162,7 @@
returns:
accounts(list): account list
"""
context().logger.info("Beginning import of accounts from the CrowdStrike Falcon environment.")
accounts = []
try:
discover = Discover(client_id=self.client_id, client_secret=self.client_secret, base_url=self.base_url)
@@ -123,7 +171,10 @@
result_filter = f"first_seen_timestamp:>'{epoch_to_datetime_conv(last_model_state_id)}'," \
f"last_successful_login_timestamp:>'{epoch_to_datetime_conv(last_model_state_id)}'"

context().logger.debug("Beginning accounts query using filter: %s", result_filter)
account_lookup = discover.query_accounts(filter=result_filter, limit=DISCOVER_PAGE_SIZE)
total = account_lookup['body']['meta']['pagination']['total']
context().logger.debug("Completed accounts query. Total accounts = %s", total)
is_paged_result = True
while is_paged_result:
if account_lookup["status_code"] == 200:
@@ -136,12 +187,17 @@
raise DatasourceFailure(err["message"], err["code"])
if len(accounts) < account_lookup['body']['meta']['pagination']['total']:
offset = len(accounts)
context().logger.debug("Beginning accounts query using offset of %s", offset)
account_lookup = discover.query_accounts(filter=result_filter, limit=DISCOVER_PAGE_SIZE,
offset=offset)
context().logger.debug("Accounts query using offset complete")
else:
is_paged_result = False
except Exception as ex:
raise DatasourceFailure(ex)

context().logger.info("Imported %s accounts from the CrowdStrike Falcon environment.", len(accounts))

return accounts

def get_logins(self, user_accounts):
@@ -152,14 +208,18 @@
returns:
account_login(list): account login event list
"""
context().logger.info("Beginning import of user logins from the CrowdStrike Falcon environment.")
user_logins = []
try:
discover = Discover(client_id=self.client_id, client_secret=self.client_secret, base_url=self.base_url)
account_ids = [account['id'] for account in user_accounts if account]
for i in range(0, len(account_ids), 100):
res_filter = "', account_id:'".join(account_ids[i:i+100])
context().logger.debug("Beginning logins query using filter: %s", res_filter)
login_lookup = discover.query_logins(filter=f"account_id:'{res_filter}'", sort='login_timestamp.desc',
limit=DISCOVER_PAGE_SIZE)
total = login_lookup['body']['meta']['pagination']['total']
context().logger.debug("Completed logins query. Total logins = %s", total)
is_paged_result = True
login_events = []
user_ids = []
@@ -185,6 +245,9 @@
is_paged_result = False
except Exception as ex:
raise DatasourceFailure(ex)

context().logger.info("Imported %s user logins from the CrowdStrike Falcon environment.", len(user_logins))

return user_logins

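get_logins batches account IDs 100 at a time and builds the lookup filter with a string join. A quick illustration of what that produces, with made-up IDs:

account_ids = ["a1", "a2", "a3"]
res_filter = "', account_id:'".join(account_ids)
print(f"account_id:'{res_filter}'")
# account_id:'a1', account_id:'a2', account_id:'a3'

A comma between clauses acts as OR in Falcon Query Language (the same idiom the applications and accounts filters above rely on), so a single request matches any account in the batch.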
def get_vulnerabilities(self, last_model_state_id=None):
@@ -195,6 +258,7 @@
returns:
vulnerabilities(list): vulnerability list
"""
context().logger.info("Beginning import of vulnerability information from the CrowdStrike Falcon environment.")
vulnerabilities = []
try:
spotlight = SpotlightVulnerabilities(client_id=self.client_id, client_secret=self.client_secret,
@@ -205,8 +269,11 @@
result_filter = f"updated_timestamp:>'{epoch_to_datetime_conv(last_model_state_id)}'+" \
f"(created_timestamp:>'{epoch_to_datetime_conv(last_model_state_id)}'," \
f"status:['open', 'closed', 'reopen'])"
context().logger.debug("Beginning vulnerabilities query using filter: %s", result_filter)
vuln_lookup = spotlight.query_vulnerabilities_combined(filter=result_filter, facet=facet,
limit=SPOTLIGHT_PAGE_SIZE)
total = vuln_lookup['body']['meta']['pagination']['total']
context().logger.debug("Completed vulnerabilities query. Total vulnerabilities = %s", total)
after = 'true'
while after:
if vuln_lookup["status_code"] == 200:
@@ -218,14 +285,20 @@
raise DatasourceFailure(err["message"], err["code"])
if 'after' in vuln_lookup['body']['meta']['pagination']:
after = vuln_lookup['body']['meta']['pagination']['after']
context().logger.debug("Beginning vulnerabilities query using after of %s", after)
vuln_lookup = spotlight.query_vulnerabilities_combined(filter=result_filter, facet=facet,
limit=SPOTLIGHT_PAGE_SIZE, after=after)
context().logger.debug("Vulnerabilities query using after complete")
except Exception as ex:
raise DatasourceFailure(ex)

context().logger.info("Imported %s vulnerabilities from the CrowdStrike Falcon environment.", len(vulnerabilities))

return vulnerabilities

def get_vulnerable_applications(self, vulnerabilities, agent_application_map):
"""add application id details to the vulnerability apps sections"""
context().logger.info("Adding application details to the application vulnerabities section")
for vuln in vulnerabilities:
agent_id = vuln['aid']
for vuln_app in vuln['apps']:
@@ -261,3 +334,5 @@ def get_vulnerable_applications(self, vulnerabilities, agent_application_map):
for app in app_details:
app_id = app['id']
vuln_app['app_id'] = app_id

context().logger.info("Adding application details to the application vulnerabities section is complete.")
3 changes: 1 addition & 2 deletions connectors/crowdstrike/requirements.txt
@@ -1,4 +1,3 @@
car-connector-framework==4.0.1
urllib3==1.26.18
car-connector-framework==4.0.2
crowdstrike-falconpy==1.2.15
pytest==7.2.2
2 changes: 1 addition & 1 deletion connectors/cybereason/requirements.txt
@@ -1,2 +1,2 @@
car-connector-framework==3.0.0
urllib3==1.26.18
urllib3==1.26.19
2 changes: 1 addition & 1 deletion connectors/gcp/requirements.txt
@@ -5,5 +5,5 @@ google-cloud-securitycenter==1.19.1
google-cloud-logging==3.5.0
google-auth==2.17.1
google-api-python-client==2.74.0
urllib3==1.26.18
urllib3==1.26.19
pytest==7.2.2
3 changes: 1 addition & 2 deletions connectors/nozomi/requirements.txt
@@ -1,3 +1,2 @@
car-connector-framework==4.0.1
urllib3==1.26.18
car-connector-framework==4.0.2
pytest==7.2.2
2 changes: 1 addition & 1 deletion connectors/okta/requirements.txt
@@ -1,3 +1,3 @@
car-connector-framework==3.0.0
httpagentparser==1.9.2
urllib3==1.26.18
urllib3==1.26.19
2 changes: 1 addition & 1 deletion connectors/proofpoint/requirements.txt
@@ -1,2 +1,2 @@
car-connector-framework==3.0.0
urllib3==1.26.18
urllib3==1.26.19
5 changes: 4 additions & 1 deletion connectors/qualys/connector/data_handler.py
@@ -1,4 +1,5 @@
import datetime
import re
from html2text import html2text
from car_framework.context import context
from car_framework.data_handler import BaseDataHandler
@@ -253,7 +254,9 @@ def handle_macaddress(self, obj):
for interface in network_interface['list']:
res = self.copy_fields(obj, )
key = interface['HostAssetInterface'].get('macAddress', "")
if not key:
# Added a check on the MAC address to ensure it is in a valid format.
# If performance issues show up, we could match only against the literal "null", but this covers additional cases.
if not key or not re.match("[0-9a-f]{2}([-:]?)[0-9a-f]{2}(\\1[0-9a-f]{2}){4}$", key.lower()):
continue
res['_key'] = key
res['interface'] = interface['HostAssetInterface'].get('interfaceName', "")
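The new check accepts twelve hex digits with a single consistent separator (colon, hyphen, or none) and rejects placeholder values such as "null". A quick test of the regex against hypothetical values:

import re

MAC_RE = re.compile(r"[0-9a-f]{2}([-:]?)[0-9a-f]{2}(\1[0-9a-f]{2}){4}$")

for key in ["77:77:77:77:77:77", "77-77-77-77-77-77", "777777777777", "null", "77:77-77:77:77:77"]:
    print(key, bool(MAC_RE.match(key.lower())))
# 77:77:77:77:77:77 True
# 77-77-77-77-77-77 True
# 777777777777 True
# null False
# 77:77-77:77:77:77 False  (the \1 backreference forces one consistent separator)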
2 changes: 1 addition & 1 deletion connectors/qualys/requirements.txt
@@ -2,4 +2,4 @@ car-connector-framework==3.0.0
xmltodict==0.12.0
pytest==7.0.0
html2text==2020.1.16
urllib3==1.26.18
urllib3==1.26.19
11 changes: 10 additions & 1 deletion connectors/qualys/tests/mock_api/host_asset.json
@@ -23,7 +23,16 @@
"hostname": "EC2AMZ",
"address": "fe80:0:0:0:f558:672:7853:f978",
"interfaceName": "AWS PV Network Device #0",
"macAddress": "77:71:7D:72:77:77"
"macAddress": "77:77:77:77:77:77"
}
},
{
"HostAssetInterface": {
"gatewayAddress": "177.77.77.7",
"hostname": "EC2AMZ",
"address": "fe80:0:0:0:f558:672:7853:f978",
"interfaceName": "AWS PV Network Device #0",
"macAddress": "null"
}
}]
},
2 changes: 1 addition & 1 deletion connectors/randori/requirements.txt
@@ -1,3 +1,3 @@
car-connector-framework==3.0.0
urllib3==1.26.18
urllib3==1.26.19
git+https://github.com/RandoriDev/randori-api-sdk.git
3 changes: 1 addition & 2 deletions connectors/reference_connector/requirements.txt
@@ -1,2 +1 @@
car-connector-framework==4.0.1
urllib3==1.26.18
car-connector-framework==4.0.2
2 changes: 1 addition & 1 deletion connectors/rhacs/requirements.txt
@@ -1,4 +1,4 @@
car-connector-framework==3.0.0
pytest==7.0.0
requests-toolbelt==0.10.1
urllib3==1.26.18
urllib3==1.26.19
2 changes: 1 addition & 1 deletion connectors/tanium/requirements.txt
@@ -1,2 +1,2 @@
car-connector-framework==4.0.1
car-connector-framework==4.0.2
python-dateutil==2.8.2
