Commit

Applied test_params to all recipes
ramo-j committed Jul 26, 2024
1 parent 35920f4 commit 387d16d
Showing 50 changed files with 98 additions and 49 deletions.
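
The change is uniform across the diff: each recipe gains a "test_params" string whose whitespace-separated tokens stand in for the recipe's positional arguments when the recipe is exercised by tests. A minimal sketch of the resulting recipe shape (field names are taken from the hunks below; the example "region" argument and the exact way the test harness consumes "test_params" are assumptions, not something this diff confirms):

{
  "name": "aws_logging_collect",
  "short_description": "Collects logs from an AWS account and dumps the results to the filesystem.",
  "test_params": "ap-southeast-2",
  "preflights": [],
  "modules": [],
  "args": [
    ["region", "AWS region to collect logs from (hypothetical; one test_params token per positional arg).", null]
  ]
}
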
1 change: 1 addition & 0 deletions data/recipes/aws_disk_to_gcp.json
@@ -2,6 +2,7 @@
"name": "aws_disk_to_gcp",
"short_description": "Copies EBS volumes from within AWS, and transfers them to GCP.",
"description": "Copies EBS volumes from within AWS by pushing them to an AWS S3 bucket. The S3 bucket is then copied to a Google Cloud Storage bucket, from which a GCP Disk Image and finally a GCP Persistent Disk are created. This operation happens in the cloud and doesn't touch the local workstation on which the recipe is run.",
"test_params": "ap-southeast-2 australia-southeast2-a vol-01234567 s3://aws-bucket gs://gcp-bucket",
"preflights": [
{
"wants": [],
1 change: 1 addition & 0 deletions data/recipes/aws_logging_collect.json
@@ -2,6 +2,7 @@
"name": "aws_logging_collect",
"short_description": "Collects logs from an AWS account and dumps the results to the filesystem.",
"description": "Collects logs from an AWS account using a specified query filter and date ranges, and dumps them on the filesystem. If no args are provided this recipe will collect 90 days of logs for the default AWS profile.",
"test_params": "ap-southeast-2",
"preflights": [{
"wants": [],
"name": "AWSAccountCheck",
3 changes: 2 additions & 1 deletion data/recipes/aws_logging_ts.json
@@ -2,6 +2,7 @@
"name": "aws_logging_ts",
"short_description": "Collects logs from an AWS account, processes the logs with Plaso and uploads the result to Timesketch.",
"description": "Collects logs from an AWS account using a specified query filter and date ranges, processes the logs with plaso and uploads the result to Timesketch. If no args are provided this recipe will collect 90 days of logs for the default AWS profile.",
"test_params": "ap-southeast-2",
"preflights": [{
"wants": [],
"name": "AWSAccountCheck",
@@ -47,7 +48,7 @@
["--start_time", "Start time for the query.", null, {"format": "datetime", "before": "@end_time"}],
["--end_time", "End time for the query.", null, {"format": "datetime_end", "after": "@start_time"}],
["--incident_id", "Incident ID (used for Timesketch description).", null],
["--sketch_id", "Timesketch sketch to which the timeline should be added.", null, {"format": "regex", "regex": "^\\d+$"}],
["--sketch_id", "Timesketch sketch to which the timeline should be added.", null, {"format": "integer"}],
["--timesketch_endpoint", "Timesketch endpoint", "http://localhost:5000/"],
["--timesketch_username", "Username for Timesketch server.", null],
["--timesketch_password", "Password for Timesketch server.", null],
3 changes: 2 additions & 1 deletion data/recipes/aws_turbinia_ts.json
@@ -2,6 +2,7 @@
"name": "aws_turbinia_ts",
"short_description": "Copies EBS volumes from within AWS, transfers them to GCP, analyses with Turbinia and exports the results to Timesketch.",
"description": "Copies EBS volumes from within AWS, uses buckets and cloud-to-cloud operations to transfer the data to GCP. Once in GCP, a persistent disk is created and a job is added to the Turbinia queue to start analysis. The resulting Plaso file is then exported to Timesketch.",
"test_params": "ap-southeast-2 australia-southeast2-a vol-01234567 s3://aws-bucket gs://gcp-bucket",
"preflights": [
{
"wants": [],
@@ -98,7 +99,7 @@
["--gcp_project", "Destination GCP project.", null, {"format": "regex", "comma_separated": false, "regex": "^[a-z][-a-z0-9]{4,28}[a-z0-9]$"}],
["--aws_profile", "Source AWS profile.", null],
["--incident_id", "Incident ID (used for Timesketch description).", null],
["--sketch_id", "Timesketch sketch to which the timeline should be added.", null, {"format": "regex", "regex": "^\\d+$"}],
["--sketch_id", "Timesketch sketch to which the timeline should be added.", null, {"format": "integer"}],
["--timesketch_endpoint", "Timesketch endpoint", "http://localhost:5000/"],
["--timesketch_username", "Username for Timesketch server.", null],
["--timesketch_password", "Password for Timesketch server.", null],
3 changes: 2 additions & 1 deletion data/recipes/azure_forensics.json
@@ -2,6 +2,7 @@
"name": "azure_forensics",
"short_description": "Copies a disk from an Azure account to an analysis VM.",
"description": "Copies a disk from an Azure account, creates an analysis VM in Azure (with a startup script containing installation instructions for basic forensics tooling), and attaches the copied disk to it.",
"test_params": "remote_profile_name analysis_resource_group_name incident_id ssh_public_key",
"modules": [{
"wants": [],
"name": "AzureCollector",
@@ -28,7 +29,7 @@
["--instance_name", "Instance name of the instance to analyze.", null],
["--disk_names", "Comma-separated list of disk names to copy.", null],
["--all_disks", "Copy all disks in the designated instance. Overrides `disk_names` if specified.", false],
["--boot_disk_size", "The size of the analysis VM's boot disk (in GB).", 50, {"format": "regex", "regex": "^\\d+$"}],
["--boot_disk_size", "The size of the analysis VM's boot disk (in GB).", "50", {"format": "integer"}],
["--analysis_region", "The Azure region in which to create the VM.", null, {"format": "azure_region"}],
["--analysis_profile_name", "Name of the Azure profile to use when creating the analysis VM.", null]
]
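
A second pattern recurs alongside the new field: integer-valued arguments that were validated with a "^\d+$" regex now use the dedicated "integer" validator, and their numeric defaults become strings ("50" rather than 50). Side by side, using the four-element arg layout visible in these hunks (switch, help text, default, validation spec; the layout is inferred from the diff, and the "before"/"after" wrapper keys are illustrative only):

{
  "before": [
    ["--boot_disk_size", "The size of the analysis VM's boot disk (in GB).", 50, {"format": "regex", "regex": "^\\d+$"}]
  ],
  "after": [
    ["--boot_disk_size", "The size of the analysis VM's boot disk (in GB).", "50", {"format": "integer"}]
  ]
}
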
1 change: 1 addition & 0 deletions data/recipes/azure_logging_collect.json
@@ -2,6 +2,7 @@
"name": "azure_logging_collect",
"short_description": "Collects logs from an Azure subscription and dumps the results to the filesystem.",
"description": "Collects logs from an Azure subscription using a specified filter, and dumps them on the filesystem.",
"test_params": "subscription_id filter_expression",
"modules": [{
"wants": [],
"name": "AzureLogsCollector",
3 changes: 2 additions & 1 deletion data/recipes/azure_logging_ts.json
@@ -2,6 +2,7 @@
"name": "azure_logging_ts",
"short_description": "Collects logs from an Azure subscription, processes the logs with Plaso and uploads the result to Timesketch.",
"description": "Collects logs from an Azure subscription using a specified query filter and date ranges, processes the logs with plaso and uploads the result to Timesketch.",
"test_params": "subscription_id filter_expression",
"preflights": [],
"modules": [{
"wants": [],
@@ -37,7 +38,7 @@
["filter_expression", "A filter expression to use for the log query, must specify at least a start date like \"eventTimestamp ge '2022-02-01'\"", null],
["--profile_name", "A profile name to use when looking for Azure credentials.", null],
["--incident_id", "Incident ID (used for Timesketch description).", null],
["--sketch_id", "Timesketch sketch to which the timeline should be added.", null, {"format": "regex", "regex": "^\\d+$"}],
["--sketch_id", "Timesketch sketch to which the timeline should be added.", null, {"format": "integer"}],
["--timesketch_endpoint", "Timesketch endpoint", "http://localhost:5000/"],
["--timesketch_username", "Username for Timesketch server.", null],
["--timesketch_password", "Password for Timesketch server.", null],
1 change: 1 addition & 0 deletions data/recipes/bigquery_collect.json
@@ -2,6 +2,7 @@
"name": "bigquery_collect",
"short_description": "Collects results from BigQuery and dumps them on the filesystem.",
"description": "Collects results from BigQuery in a GCP project and dumps them in JSONL on the local filesystem.",
"test_params": "projectfolder.projectname query description",
"preflights": [{
"wants": [],
"name": "GCPTokenCheck",
3 changes: 2 additions & 1 deletion data/recipes/bigquery_ts.json
@@ -2,6 +2,7 @@
"name": "bigquery_ts",
"short_description": "Collects results from BigQuery and uploads them to Timesketch.",
"description": "Collects results from BigQuery in JSONL form, dumps them to the filesystem, and uploads them to Timesketch.",
"test_params": "prohectfolder.projectname query description",
"preflights": [{
"wants": [],
"name": "GCPTokenCheck",
@@ -38,7 +39,7 @@
["query", "Query to execute.", null],
["description", "Human-readable description of the query.", null],
["--incident_id", "Incident ID (used for Timesketch description).", null],
["--sketch_id", "Timesketch sketch to which the timeline should be added.", null, {"format": "regex", "regex": "^\\d+$"}],
["--sketch_id", "Timesketch sketch to which the timeline should be added.", null, {"format": "integer"}],
["--timesketch_endpoint", "Timesketch endpoint", "http://localhost:5000/"],
["--timesketch_username", "Username for Timesketch server.", null],
["--timesketch_password", "Password for Timesketch server.", null],
1 change: 1 addition & 0 deletions data/recipes/gce_disk_copy.json
@@ -2,6 +2,7 @@
"name": "gce_disk_copy",
"short_description": "Copy disks from one project to another.",
"description": "Copies disks from one project to another. The disks can be specified individually, or instances can be specified, to copy all their disks or boot disks.",
"test_params": "projectname",
"preflights": [{
"wants": [],
"name": "GCPTokenCheck",
1 change: 1 addition & 0 deletions data/recipes/gce_disk_export.json
@@ -2,6 +2,7 @@
"name": "gce_disk_export",
"short_description": "Export a disk image from a GCP project to a Google Cloud Storage bucket.",
"description": "Creates a disk image from Google Compute persistent disks, compresses the images, and exports them to Google Cloud Storage.\n\nThe exported images names are appended by `.tar.gz.`\n\nAs this export happens through a Cloud Build job, the default service account `[PROJECT-NUMBER]@cloudbuild.gserviceaccount.com` in the source or analysis project (if provided) must have the IAM role `[Storage Admin]` on their corresponding project's storage bucket/folder.",
"test_params": "sourceproject gs://bucket",
"preflights": [{
"wants": [],
"name": "GCPTokenCheck",
1 change: 1 addition & 0 deletions data/recipes/gcp_cloud_resource_tree.json
@@ -2,6 +2,7 @@
"name": "gcp_cloud_resource_tree",
"description": "Generates a parent/children tree for given GCP resource by enumerating all the currently available resources. It also will attempt to fill any gaps identified in the tree through querying the GCP logs",
"short_description": "Generates a parent/children tree for given GCP resource.",
"test_params": "projectid australia-southeast2-a resource_type",
"preflights": [{
"wants": [],
"name": "GCPTokenCheck",
2 changes: 1 addition & 1 deletion data/recipes/gcp_cloud_resource_tree_offline.json
@@ -2,6 +2,7 @@
"name": "gcp_cloud_resource_tree_offline",
"description": "Generates a parent/children tree for given GCP resource using the supplied exported GCP logs",
"short_description": "Generates a parent/children tree for given GCP resource using the supplied exported GCP logs",
"test_params": "projectid australia-southeast2-a resource_type paths",
"modules": [{
"wants": [],
"name": "FilesystemCollector",
@@ -27,6 +28,5 @@
["paths", "Comma-separated paths to GCP log files. Log files should contain log entiries in json format.", null],
["--resource_id","Resource id", null],
["--resource_name","Resource name", null]
-
]
}
1 change: 1 addition & 0 deletions data/recipes/gcp_disk_export_dd.json
@@ -2,6 +2,7 @@
"name": "gce_disk_export_dd",
"short_description": "Stream the disk bytes from a GCP project to a Google Cloud Storage bucket.",
"description": "The export is performed via bit streaming the the disk bytes to GCS. This will allow getting a disk image out of the project in case both organization policies `constraints/compute.storageResourceUseRestrictions` and `constraints/compute.trustedImageProjects` are enforced and in case OsLogin is allowed only for the organization users while the analyst is an external user with no roles/`compute.osLoginExternalUser` role.\n\nThe exported images names are appended by `.tar.gz.`\n\nThe compute engine default service account in the source project must have sufficient permissions to Create and List Storage objects on the corresponding storage bucket/folder.",
"test_params": "projectname gs://bucket",
"preflights": [{
"wants": [],
"name": "GCPTokenCheck",
5 changes: 3 additions & 2 deletions data/recipes/gcp_forensics.json
@@ -2,6 +2,7 @@
"name": "gcp_forensics",
"short_description": "Copies disk from a GCP project to an analysis VM.",
"description": "Copies a persistent disk from a GCP project to another, creates an analysis VM (with a startup script containing installation instructions for basic forensics tooling) in the destination project, and attaches the copied GCP persistent disk to it.",
"test_params": "source-project-name analysis-project-name",
"preflights": [{
"wants": [],
"name": "GCPTokenCheck",
@@ -56,8 +57,8 @@
["--all_disks", "Copy all disks in the designated instance. Overrides `disk_names` if specified.", false],
["--stop_instances", "Stop the designated instance after copying disks.", false],
["--create_analysis_vm", "Create an analysis VM in the destination project.", true],
["--cpu_cores", "Number of CPU cores of the analysis VM.", 4, {"format": "regex", "regex": "^\\d+$"}],
["--boot_disk_size", "The size of the analysis VM boot disk (in GB).", 50, {"format": "regex", "regex": "^\\d+$"}],
["--cpu_cores", "Number of CPU cores of the analysis VM.", "4", {"format": "integer"}],
["--boot_disk_size", "The size of the analysis VM boot disk (in GB).", "50", {"format": "integer"}],
["--boot_disk_type", "Disk type to use [pd-standard, pd-ssd].", "pd-standard", {"format": "regex", "regex": "^pd-((ssd)|(standard))$"}],
["--zone", "The GCP zone where the Analysis VM and copied disks will be created.", "us-central1-f", {"format": "gcp_zone"}]
]
5 changes: 3 additions & 2 deletions data/recipes/gcp_logging_cloudaudit_ts.json
@@ -2,6 +2,7 @@
"name": "gcp_logging_cloudaudit_ts",
"short_description": "Collects GCP logs from a project and exports them to Timesketch.",
"description": "Collects GCP logs from a project and exports them to Timesketch. Some light processing is made to translate the logs into something Timesketch can process.",
"test_params": "project-name 2024-01-01 2024-01-31",
"preflights": [{
"wants": [],
"name": "GCPTokenCheck",
@@ -46,13 +47,13 @@
["start_date", "Start date.", null, {"format": "datetime", "before": "@end_date"}],
["end_date", "End date.", null, {"format": "datetime_end", "after": "@start_date"}],
["--incident_id", "Incident ID (used for Timesketch description).", null],
["--sketch_id", "Timesketch sketch to which the timeline should be added.", null, {"format": "regex", "regex": "^\\d+$"}],
["--sketch_id", "Timesketch sketch to which the timeline should be added.", null, {"format": "integer"}],
["--timesketch_endpoint", "Timesketch endpoint", "http://localhost:5000/"],
["--timesketch_username", "Username for Timesketch server.", null],
["--timesketch_password", "Password for Timesketch server.", null],
["--token_password", "Optional custom password to decrypt Timesketch credential file with.", ""],
["--wait_for_timelines", "Whether to wait for Timesketch to finish processing all timelines.", true],
["--backoff", "If GCP Cloud Logging API query limits are exceeded, retry with an increased delay between each query to try complete the query at a slower rate.", false],
["--delay", "Number of seconds to wait between each GCP Cloud Logging query to avoid hitting API query limits", 0, {"format": "regex", "regex": "^\\d+$"}]
["--delay", "Number of seconds to wait between each GCP Cloud Logging query to avoid hitting API query limits", "0", {"format": "integer"}]
]
}
7 changes: 4 additions & 3 deletions data/recipes/gcp_logging_cloudsql_ts.json
@@ -2,7 +2,8 @@
"name": "gcp_logging_cloudsql_ts",
"short_description": "Collects GCP related to Cloud SQL instances in a project and exports them to Timesketch.",
"description": "Collects GCP related to Cloud SQL instances in a project and exports them to Timesketch. Some light processing is made to translate the logs into something Timesketch can process.",
"preflights": [{
"test_params": "project-name 2024-01-01 2024-01-31",
"preflights": [{
"wants": [],
"name": "GCPTokenCheck",
"args": {
@@ -50,13 +51,13 @@
["start_date", "Start date.", null, {"format": "datetime", "before": "@end_date"}],
["end_date", "End date.", null, {"format": "datetime_end", "after": "@start_date"}],
["--incident_id", "Incident ID (used for Timesketch description).", null],
["--sketch_id", "Timesketch sketch to which the timeline should be added.", null, {"format": "regex", "regex": "^\\d+$"}],
["--sketch_id", "Timesketch sketch to which the timeline should be added.", null, {"format": "integer"}],
["--timesketch_endpoint", "Timesketch endpoint", "http://localhost:5000/"],
["--timesketch_username", "Username for Timesketch server.", null],
["--timesketch_password", "Password for Timesketch server.", null],
["--token_password", "Optional custom password to decrypt Timesketch credential file with.", ""],
["--wait_for_timelines", "Whether to wait for Timesketch to finish processing all timelines.", true],
["--backoff", "If GCP Cloud Logging API query limits are exceeded, retry with an increased delay between each query to try complete the query at a slower rate.", false],
["--delay", "Number of seconds to wait between each GCP Cloud Logging query to avoid hitting API query limits", 0, {"format": "regex", "regex": "^\\d+$"}]
["--delay", "Number of seconds to wait between each GCP Cloud Logging query to avoid hitting API query limits", "0", {"format": "integer"}]
]
}
3 changes: 2 additions & 1 deletion data/recipes/gcp_logging_collect.json
@@ -2,6 +2,7 @@
"name": "gcp_logging_collect",
"description": "Collects logs from a GCP project and dumps on the filesystem.",
"short_description": "Collects logs from a GCP project and dumps on the filesystem (JSON). https://cloud.google.com/logging/docs/view/query-library for example queries.",
"test_params": "project-name filter_expression",
"preflights": [{
"wants": [],
"name": "GCPTokenCheck",
@@ -25,6 +26,6 @@
["project_name", "Name of the GCP project to collect logs from.", null, {"format": "regex", "comma_separated": false, "regex": "^[a-z][-a-z0-9]{4,28}[a-z0-9]$"}],
["filter_expression", "Filter expression to use to query GCP logs. See https://cloud.google.com/logging/docs/view/query-library for examples.", "resource.type = 'gce_instance'"],
["--backoff", "If GCP Cloud Logging API query limits are exceeded, retry with an increased delay between each query to try complete the query at a slower rate.", false],
["--delay", "Number of seconds to wait between each GCP Cloud Logging query to avoid hitting API query limits", 0, {"format": "regex", "regex": "^\\d+$"}]
["--delay", "Number of seconds to wait between each GCP Cloud Logging query to avoid hitting API query limits", "0", {"format": "integer"}]
]
}