Skip to content

Commit

Permalink
Merge pull request #334 from splunk/contentctl_5
Browse files Browse the repository at this point in the history
contentctl 5 - Step 4 - ESCU 5.0
This PR has not been fully reviewed and we anticipate some issues. However, in order to enable testing in a number of different workflows, we are releasing it as an alpha.
  • Loading branch information
pyth0n1c authored Jan 18, 2025
2 parents c26e492 + 248e436 commit c75f3cd
Show file tree
Hide file tree
Showing 73 changed files with 1,785 additions and 1,505 deletions.
5 changes: 5 additions & 0 deletions .github/dependabot.yml
Original file line number Diff line number Diff line change
Expand Up @@ -12,3 +12,8 @@ updates:
schedule:
interval: "daily"
open-pull-requests-limit: 6
- package-ecosystem: "github-actions"
directory: "/"
schedule:
# Check for updates to GitHub Actions every week
interval: "weekly"
22 changes: 22 additions & 0 deletions .github/workflows/ruff.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
name: lint & format
on:
pull_request:
types: [opened, reopened, synchronize]

jobs:
lint:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Install Python
uses: actions/setup-python@v5
with:
python-version: "3.11"
- name: Install ruff
run: |
python -m pip install --upgrade pip
pip install ruff
- name: Run lint
run: ruff check --output-format=github contentctl/
- name: Run Formatter
run: ruff format --check contentctl/
7 changes: 3 additions & 4 deletions .github/workflows/testEndToEnd.yml
Original file line number Diff line number Diff line change
@@ -1,8 +1,7 @@
name: testEndToEnd
on:
push:
pull_request:
types: [opened, reopened]
types: [opened, reopened, synchronize]
schedule:
- cron: "44 4 * * *"

Expand All @@ -11,8 +10,8 @@ jobs:
strategy:
fail-fast: false
matrix:
python_version: ["3.11", "3.12"]
operating_system: ["ubuntu-20.04", "ubuntu-22.04", "macos-latest", "macos-14", "windows-2022"]
python_version: ["3.11", "3.12", "3.13"]
operating_system: ["ubuntu-24.04", "macos-15", "windows-2022"]
#operating_system: ["ubuntu-20.04", "ubuntu-22.04", "macos-latest"]


Expand Down
8 changes: 4 additions & 4 deletions .github/workflows/test_against_escu.yml
Original file line number Diff line number Diff line change
Expand Up @@ -6,9 +6,8 @@
# not yet been fixed in security_content, we may see this workflow fail.
name: test_against_escu
on:
push:
pull_request:
types: [opened, reopened]
types: [opened, reopened, synchronize]
schedule:
- cron: "44 4 * * *"

Expand All @@ -17,9 +16,9 @@ jobs:
strategy:
fail-fast: false
matrix:
python_version: ["3.11", "3.12"]
python_version: ["3.11", "3.12", "3.13"]

operating_system: ["ubuntu-20.04", "ubuntu-22.04", "macos-latest", "macos-14"]
operating_system: ["ubuntu-24.04", "macos-15"]
# Do not test against ESCU until known character encoding issue is resolved
# operating_system: ["ubuntu-20.04", "ubuntu-22.04", "macos-latest", "macos-14", "windows-2022"]

Expand All @@ -36,6 +35,7 @@ jobs:
with:
path: security_content
repository: splunk/security_content
ref: rba_migration

#Install the given version of Python we will test against
- name: Install Required Python Version
Expand Down
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@ poetry.lock
# usual mac files
.DS_Store
*/.DS_Store
.ruff_cache

# custom
dist/*
Expand Down
16 changes: 16 additions & 0 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v5.0.0 # Use the ref you want to point at
hooks:
- id: check-json
- id: check-symlinks
- id: check-yaml
- id: detect-aws-credentials
- id: detect-private-key
- id: forbid-submodules
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.9.2
hooks:
- id: ruff
args: [ --fix ]
- id: ruff-format
5 changes: 5 additions & 0 deletions .vscode/extensions.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
{
"recommendations": [
"charliermarsh.ruff"
]
}
10 changes: 9 additions & 1 deletion .vscode/settings.json
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,15 @@
"python.testing.cwd": "${workspaceFolder}",
"python.languageServer": "Pylance",
"python.analysis.typeCheckingMode": "strict",
"editor.defaultFormatter": "ms-python.black-formatter"
"[python]": {
"editor.formatOnSave": true,
"editor.codeActionsOnSave": {
"source.fixAll": "explicit",
"source.organizeImports": "explicit"
},
"editor.defaultFormatter": "charliermarsh.ruff",
},
"ruff.nativeServer": "on"


}
66 changes: 39 additions & 27 deletions contentctl/actions/build.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,17 +4,17 @@

from dataclasses import dataclass

from contentctl.objects.enums import SecurityContentProduct, SecurityContentType
from contentctl.objects.enums import SecurityContentType
from contentctl.input.director import Director, DirectorOutputDto
from contentctl.output.conf_output import ConfOutput
from contentctl.output.conf_writer import ConfWriter
from contentctl.output.api_json_output import ApiJsonOutput
from contentctl.output.data_source_writer import DataSourceWriter
from contentctl.objects.lookup import Lookup
from contentctl.objects.lookup import CSVLookup, Lookup_Type
import pathlib
import json
import datetime
from typing import Union
import uuid

from contentctl.objects.config import build

Expand All @@ -34,27 +34,41 @@ def execute(self, input_dto: BuildInputDto) -> DirectorOutputDto:
updated_conf_files:set[pathlib.Path] = set()
conf_output = ConfOutput(input_dto.config)


# Construct a path to a YML that does not actually exist.
# We mock this "fake" path since the YML does not exist.
# This ensures the checking for the existence of the CSV is correct
data_sources_fake_yml_path = input_dto.config.getPackageDirectoryPath() / "lookups" / "data_sources.yml"

# Construct a special lookup whose CSV is created at runtime and
# written directly into the output folder. It is created with model_construct,
# not model_validate, because the CSV does not exist yet.
# written directly into the lookups folder. We will delete this after a build,
# assuming that it is successful.
data_sources_lookup_csv_path = input_dto.config.getPackageDirectoryPath() / "lookups" / "data_sources.csv"
DataSourceWriter.writeDataSourceCsv(input_dto.director_output_dto.data_sources, data_sources_lookup_csv_path)
input_dto.director_output_dto.addContentToDictMappings(Lookup.model_construct(description= "A lookup file that will contain the data source objects for detections.",
filename=data_sources_lookup_csv_path,
name="data_sources"))



DataSourceWriter.writeDataSourceCsv(input_dto.director_output_dto.data_sources, data_sources_lookup_csv_path)
input_dto.director_output_dto.addContentToDictMappings(CSVLookup.model_construct(name="data_sources",
id=uuid.UUID("b45c1403-6e09-47b0-824f-cf6e44f15ac8"),
version=1,
author=input_dto.config.app.author_name,
date = datetime.date.today(),
description= "A lookup file that will contain the data source objects for detections.",
lookup_type=Lookup_Type.csv,
file_path=data_sources_fake_yml_path))
updated_conf_files.update(conf_output.writeHeaders())
updated_conf_files.update(conf_output.writeObjects(input_dto.director_output_dto.detections, SecurityContentType.detections))
updated_conf_files.update(conf_output.writeObjects(input_dto.director_output_dto.stories, SecurityContentType.stories))
updated_conf_files.update(conf_output.writeObjects(input_dto.director_output_dto.baselines, SecurityContentType.baselines))
updated_conf_files.update(conf_output.writeObjects(input_dto.director_output_dto.investigations, SecurityContentType.investigations))
updated_conf_files.update(conf_output.writeObjects(input_dto.director_output_dto.lookups, SecurityContentType.lookups))
updated_conf_files.update(conf_output.writeObjects(input_dto.director_output_dto.macros, SecurityContentType.macros))
updated_conf_files.update(conf_output.writeObjects(input_dto.director_output_dto.dashboards, SecurityContentType.dashboards))
updated_conf_files.update(conf_output.writeLookups(input_dto.director_output_dto.lookups))
updated_conf_files.update(conf_output.writeDetections(input_dto.director_output_dto.detections))
updated_conf_files.update(conf_output.writeStories(input_dto.director_output_dto.stories))
updated_conf_files.update(conf_output.writeBaselines(input_dto.director_output_dto.baselines))
updated_conf_files.update(conf_output.writeInvestigations(input_dto.director_output_dto.investigations))
updated_conf_files.update(conf_output.writeMacros(input_dto.director_output_dto.macros))
updated_conf_files.update(conf_output.writeDashboards(input_dto.director_output_dto.dashboards))
updated_conf_files.update(conf_output.writeMiscellaneousAppFiles())




#Ensure that the conf file we just generated/update is syntactically valid
for conf_file in updated_conf_files:
ConfWriter.validateConfFile(conf_file)
Expand All @@ -67,17 +81,15 @@ def execute(self, input_dto: BuildInputDto) -> DirectorOutputDto:
if input_dto.config.build_api:
shutil.rmtree(input_dto.config.getAPIPath(), ignore_errors=True)
input_dto.config.getAPIPath().mkdir(parents=True)
api_json_output = ApiJsonOutput()
for output_objects, output_type in [(input_dto.director_output_dto.detections, SecurityContentType.detections),
(input_dto.director_output_dto.stories, SecurityContentType.stories),
(input_dto.director_output_dto.baselines, SecurityContentType.baselines),
(input_dto.director_output_dto.investigations, SecurityContentType.investigations),
(input_dto.director_output_dto.lookups, SecurityContentType.lookups),
(input_dto.director_output_dto.macros, SecurityContentType.macros),
(input_dto.director_output_dto.deployments, SecurityContentType.deployments)]:
api_json_output.writeObjects(output_objects, input_dto.config.getAPIPath(), input_dto.config.app.label, output_type )


api_json_output = ApiJsonOutput(input_dto.config.getAPIPath(), input_dto.config.app.label)
api_json_output.writeDetections(input_dto.director_output_dto.detections)
api_json_output.writeStories(input_dto.director_output_dto.stories)
api_json_output.writeBaselines(input_dto.director_output_dto.baselines)
api_json_output.writeInvestigations(input_dto.director_output_dto.investigations)
api_json_output.writeLookups(input_dto.director_output_dto.lookups)
api_json_output.writeMacros(input_dto.director_output_dto.macros)
api_json_output.writeDeployments(input_dto.director_output_dto.deployments)


#create version file for sse api
version_file = input_dto.config.getAPIPath()/"version.json"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,6 @@
from contentctl.actions.detection_testing.infrastructures.DetectionTestingInfrastructureServer import DetectionTestingInfrastructureServer
from urllib.parse import urlparse
from copy import deepcopy
from contentctl.objects.enums import DetectionTestingTargetInfrastructure
import signal
import datetime
# from queue import Queue
Expand Down
Loading

0 comments on commit c75f3cd

Please sign in to comment.