From 1b179761f9627c90e42976bfd280a0cf2275af29 Mon Sep 17 00:00:00 2001
From: ramoj
Date: Mon, 22 Jul 2024 00:18:15 +0000
Subject: [PATCH 1/9] Expand enumerated tests

---
 tests/cli/main_tool.py | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/tests/cli/main_tool.py b/tests/cli/main_tool.py
index 4fe358dc4..f4843a1b2 100644
--- a/tests/cli/main_tool.py
+++ b/tests/cli/main_tool.py
@@ -104,6 +104,9 @@ def testToolWithArbitraryRecipe(self):
 
   @parameterized.named_parameters(_EnumerateRecipeNames())
   def testRecipeSetupArgs(self, recipe_name):
+    self._testRecipeSetupArgs(recipe_name)
+
+  def _testRecipeSetupArgs(self, recipe_name):
     """Checks that all recipes pass the correct arguments to their modules."""
     # We want to access the tool's state object to load recipes and go through
     # modules.
@@ -129,6 +132,9 @@ def testRecipeSetupArgs(self, recipe_name):
 
   @parameterized.named_parameters(_EnumerateRecipeNames())
   def testRecipeValidators(self, recipe_name):
+    self._testRecipeValidators(recipe_name)
+
+  def _testRecipeValidators(self, recipe_name):
     """Tests that recipes do not specify invalid validators."""
     # pylint: disable=protected-access
     self.tool._state = dftw_state.DFTimewolfState(config.Config)
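The wrapper/helper split introduced above exists because absl's test runner only collects methods whose names start with `test`, while `parameterized.named_parameters` consumes its `(name, argument)` pairs at class-definition time. A minimal sketch of the pattern, with a hypothetical name generator standing in for `_EnumerateRecipeNames()` (the recipe names below are placeholders, not part of the patch):

```python
from absl.testing import absltest
from absl.testing import parameterized


def _EnumerateNames():
  # Placeholder for _EnumerateRecipeNames(): yields (case suffix, argument).
  for name in ('aws_forensics', 'gcp_forensics'):
    yield (name, name)


class ExampleTest(parameterized.TestCase):

  @parameterized.named_parameters(_EnumerateNames())
  def testPerRecipe(self, recipe_name):
    """Parameterised wrapper; the real logic lives in the private helper."""
    self._testPerRecipe(recipe_name)

  def _testPerRecipe(self, recipe_name):
    # Not collected directly by the runner (no "test" prefix), so other
    # tests can also call it with an arbitrary recipe name.
    self.assertIsInstance(recipe_name, str)


if __name__ == '__main__':
  absltest.main()
```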
From 611446f78e2e27ed07a5830266084ac11996ee6c Mon Sep 17 00:00:00 2001
From: ramoj
Date: Mon, 22 Jul 2024 01:30:31 +0000
Subject: [PATCH 2/9] Start of test expansion for arg validation per recipe

---
 data/recipes/aws_forensics.json | 3 ++-
 dftimewolf/lib/resources.py     | 3 +++
 tests/cli/main_tool.py          | 7 ++++++-
 3 files changed, 11 insertions(+), 2 deletions(-)

diff --git a/data/recipes/aws_forensics.json b/data/recipes/aws_forensics.json
index 51f9afdce..2e67faecf 100644
--- a/data/recipes/aws_forensics.json
+++ b/data/recipes/aws_forensics.json
@@ -2,6 +2,7 @@
     "name": "aws_forensics",
     "short_description": "Copies a volume from an AWS account to an analysis VM.",
     "description": "Copies a volume from an AWS account, creates an analysis VM in AWS (with a startup script containing installation instructions for basic forensics tooling), and attaches the copied volume to it.",
+    "test_params": "default us-east-1 incident_id --instance_id i-01234567 --volume_ids vol-01234567",
    "modules": [{
         "wants": [],
         "name": "AWSCollector",
@@ -26,7 +27,7 @@
         ["--instance_id", "Instance ID of the instance to analyze.", null, {"format": "regex", "comma_separated": false, "regex": "^i-[0-9a-f]{8,17}$"}],
         ["--volume_ids", "Comma-separated list of volume IDs to copy.", null, {"format": "regex", "comma_separated": true, "regex": "^vol-[0-9a-f]{8,17}$"}],
         ["--all_volumes", "Copy all volumes in the designated instance. Overrides volume_ids if specified.", false],
-        ["--boot_volume_size", "The size of the analysis VM boot volume (in GB).", 50, {"format": "regex", "regex": "^\\d+$"}],
+        ["--boot_volume_size", "The size of the analysis VM boot volume (in GB).", "50", {"format": "regex", "regex": "^\\d+$"}],
         ["--analysis_zone", "The AWS zone in which to create the VM.", null, {"format": "aws_region"}],
         ["--analysis_profile_name", "Name of the AWS profile to use when creating the analysis VM.", null]
     ]
diff --git a/dftimewolf/lib/resources.py b/dftimewolf/lib/resources.py
index 686a604e5..f570da91a 100644
--- a/dftimewolf/lib/resources.py
+++ b/dftimewolf/lib/resources.py
@@ -62,3 +62,6 @@ def GetHelpString(self) -> str:
     short_description = self.contents.get(
         'short_description', 'No description')
     return ' {0:<35s}{1:s}\n'.format(self.name, short_description)
+
+  def GetTestParams(self) -> list[str]:
+    return self.contents.get('test_params', '').split(' ')
diff --git a/tests/cli/main_tool.py b/tests/cli/main_tool.py
index f4843a1b2..13ad94866 100644
--- a/tests/cli/main_tool.py
+++ b/tests/cli/main_tool.py
@@ -70,7 +70,7 @@ def _EnumerateRecipeNames():
   tool = _CreateToolObject()
   # pylint: disable=protected-access
   for recipe in tool._recipes_manager.GetRecipes():
-    yield (recipe.name, recipe.name)
+    yield (f'_{recipe.name}', recipe.name)
 
 
 class MainToolTest(parameterized.TestCase):
@@ -140,6 +140,9 @@ def _testRecipeValidators(self, recipe_name):
     self.tool._state = dftw_state.DFTimewolfState(config.Config)
     recipe = self.tool._recipes_manager.Recipes()[recipe_name]
 
+    recipe_args = [recipe_name] + recipe.GetTestParams()
+    self.tool.ParseArguments(recipe_args)
+
     self.tool._state.LoadRecipe(recipe.contents, dftimewolf_recipes.MODULES)
     for arg in recipe.args:
       if arg.validation_params:
@@ -148,6 +151,8 @@ def _testRecipeValidators(self, recipe_name):
             validators_manager.ValidatorsManager.ListValidators(),
             f'Error in {recipe.name}:{arg.switch} - '
             f'Invalid validator {arg.validation_params["format"]}.')
+
+    self.tool.ValidateArguments()
 
   def testRecipeWithNestedArgs(self):
     """Tests that a recipe with args referenced in other args is populated."""
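The idea behind `test_params` in PATCH 2 is that each recipe carries a space-separated argument string mirroring a real CLI invocation: `GetTestParams` splits it, and the test prepends the recipe name before calling `ParseArguments`. A rough sketch of that mapping, using the `aws_forensics` value added above (the `split(' ')` mirrors `GetTestParams`; everything else is illustrative):

```python
# The "test_params" string from aws_forensics.json.
test_params = ('default us-east-1 incident_id '
               '--instance_id i-01234567 --volume_ids vol-01234567')

# What the test hands to self.tool.ParseArguments(...):
recipe_args = ['aws_forensics'] + test_params.split(' ')

# Equivalent command line:
#   dftimewolf aws_forensics default us-east-1 incident_id \
#       --instance_id i-01234567 --volume_ids vol-01234567
print(recipe_args)
```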
From 92b38385041b26830805ebf53fe18ff8a199a87c Mon Sep 17 00:00:00 2001
From: ramoj
Date: Mon, 22 Jul 2024 01:39:08 +0000
Subject: [PATCH 3/9] Small update

---
 dftimewolf/lib/resources.py |  8 +++++---
 tests/cli/main_tool.py      | 10 +++++++---
 2 files changed, 12 insertions(+), 6 deletions(-)

diff --git a/dftimewolf/lib/resources.py b/dftimewolf/lib/resources.py
index f570da91a..a2146668d 100644
--- a/dftimewolf/lib/resources.py
+++ b/dftimewolf/lib/resources.py
@@ -2,7 +2,7 @@
 """Various dfTimewolf resource objects."""
 
 import dataclasses
-from typing import Any, Dict, Sequence
+from typing import Any, Dict, Sequence, Optional
 
 
 @dataclasses.dataclass
@@ -63,5 +63,7 @@ def GetHelpString(self) -> str:
         'short_description', 'No description')
     return ' {0:<35s}{1:s}\n'.format(self.name, short_description)
 
-  def GetTestParams(self) -> list[str]:
-    return self.contents.get('test_params', '').split(' ')
+  def GetTestParams(self) -> Optional[list[str]]:
+    if self.contents.get('test_params', None):
+      return self.contents.get('test_params', '').split(' ')
+    return None
diff --git a/tests/cli/main_tool.py b/tests/cli/main_tool.py
index 13ad94866..39caedb3e 100644
--- a/tests/cli/main_tool.py
+++ b/tests/cli/main_tool.py
@@ -140,8 +140,11 @@ def _testRecipeValidators(self, recipe_name):
     self.tool._state = dftw_state.DFTimewolfState(config.Config)
     recipe = self.tool._recipes_manager.Recipes()[recipe_name]
 
-    recipe_args = [recipe_name] + recipe.GetTestParams()
-    self.tool.ParseArguments(recipe_args)
+    test_params = recipe.GetTestParams()
+    if test_params:
+      print("Here I am")
+      recipe_args = [recipe_name] + test_params
+      self.tool.ParseArguments(recipe_args)
 
     self.tool._state.LoadRecipe(recipe.contents, dftimewolf_recipes.MODULES)
     for arg in recipe.args:
@@ -152,7 +155,8 @@ def _testRecipeValidators(self, recipe_name):
             validators_manager.ValidatorsManager.ListValidators(),
             f'Error in {recipe.name}:{arg.switch} - '
             f'Invalid validator {arg.validation_params["format"]}.')
 
-    self.tool.ValidateArguments()
+    if test_params:
+      self.tool.ValidateArguments()
 
   def testRecipeWithNestedArgs(self):
     """Tests that a recipe with args referenced in other args is populated."""

From ab6304f2a2902dfca23af56429f4d103532e889e Mon Sep 17 00:00:00 2001
From: ramoj
Date: Mon, 22 Jul 2024 01:40:40 +0000
Subject: [PATCH 4/9] Remove debug print

---
 tests/cli/main_tool.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/tests/cli/main_tool.py b/tests/cli/main_tool.py
index 39caedb3e..0b1783255 100644
--- a/tests/cli/main_tool.py
+++ b/tests/cli/main_tool.py
@@ -142,7 +142,6 @@ def _testRecipeValidators(self, recipe_name):
 
     test_params = recipe.GetTestParams()
     if test_params:
-      print("Here I am")
       recipe_args = [recipe_name] + test_params
       self.tool.ParseArguments(recipe_args)
 

From 64c9b09bbca7a8467676e08300c70085c27f270a Mon Sep 17 00:00:00 2001
From: ramoj
Date: Thu, 25 Jul 2024 04:30:07 +0000
Subject: [PATCH 5/9] Added an integer validator

---
 data/recipes/aws_forensics.json      |  4 ++--
 dftimewolf/lib/resources.py          |  3 ++-
 dftimewolf/lib/validators/integer.py | 38 +++++++++++++++++++++
 tests/lib/validators/integer.py      | 49 ++++++++++++++++++++++++++++
 4 files changed, 91 insertions(+), 3 deletions(-)
 create mode 100644 dftimewolf/lib/validators/integer.py
 create mode 100644 tests/lib/validators/integer.py

diff --git a/data/recipes/aws_forensics.json b/data/recipes/aws_forensics.json
index 2e67faecf..930cb2770 100644
--- a/data/recipes/aws_forensics.json
+++ b/data/recipes/aws_forensics.json
@@ -16,7 +16,7 @@
         "volume_ids": "@volume_ids",
         "all_volumes": "@all_volumes",
         "boot_volume_size": "@boot_volume_size",
-        "cpu_cores": 16,
+        "cpu_cores": "16",
         "ami": null
       }
     }],
@@ -27,7 +27,7 @@
         ["--instance_id", "Instance ID of the instance to analyze.", null, {"format": "regex", "comma_separated": false, "regex": "^i-[0-9a-f]{8,17}$"}],
         ["--volume_ids", "Comma-separated list of volume IDs to copy.", null, {"format": "regex", "comma_separated": true, "regex": "^vol-[0-9a-f]{8,17}$"}],
         ["--all_volumes", "Copy all volumes in the designated instance. Overrides volume_ids if specified.", false],
-        ["--boot_volume_size", "The size of the analysis VM boot volume (in GB).", "50", {"format": "regex", "regex": "^\\d+$"}],
+        ["--boot_volume_size", "The size of the analysis VM boot volume (in GB).", "50", {"format": "integer"}],
         ["--analysis_zone", "The AWS zone in which to create the VM.", null, {"format": "aws_region"}],
         ["--analysis_profile_name", "Name of the AWS profile to use when creating the analysis VM.", null]
     ]
diff --git a/dftimewolf/lib/resources.py b/dftimewolf/lib/resources.py
index a2146668d..b7753e29c 100644
--- a/dftimewolf/lib/resources.py
+++ b/dftimewolf/lib/resources.py
@@ -64,6 +64,7 @@ def GetHelpString(self) -> str:
     return ' {0:<35s}{1:s}\n'.format(self.name, short_description)
 
   def GetTestParams(self) -> Optional[list[str]]:
+    """Get the test params from a recipe."""
     if self.contents.get('test_params', None):
-      return self.contents.get('test_params', '').split(' ')
+      return str(self.contents.get('test_params', '')).split(' ')
     return None
diff --git a/dftimewolf/lib/validators/integer.py b/dftimewolf/lib/validators/integer.py
new file mode 100644
index 000000000..3116236a5
--- /dev/null
+++ b/dftimewolf/lib/validators/integer.py
@@ -0,0 +1,38 @@
+# -*- coding: utf-8 -*-
+"""Validator for integers."""
+
+from dftimewolf.lib import errors, resources, args_validator
+from dftimewolf.lib.validators import manager as validators_manager
+
+
+class IntegerValidator(args_validator.AbstractValidator):
+  """Validates an integer."""
+
+  NAME = 'integer'
+
+  def Validate(self,
+               argument_value: str,
+               recipe_argument: resources.RecipeArgument) -> int:
+    """Validate that the argument_value is a valid integer.
+
+    Args:
+      argument_value: The argument value to validate.
+      recipe_argument: The definition of the argument.
+
+    Returns:
+      A parsed integer.
+
+    Raises:
+      errors.RecipeArgsValidationFailure: If the argument is not an integer.
+    """
+    try:
+      return int(argument_value)
+    except ValueError:
+      raise errors.RecipeArgsValidationFailure(
+          recipe_argument.switch,
+          argument_value,
+          self.NAME,
+          'Not a valid integer')
+
+
+validators_manager.ValidatorsManager.RegisterValidator(IntegerValidator)
diff --git a/tests/lib/validators/integer.py b/tests/lib/validators/integer.py
new file mode 100644
index 000000000..1db8633c3
--- /dev/null
+++ b/tests/lib/validators/integer.py
@@ -0,0 +1,49 @@
+# -*- coding: utf-8 -*-
+"""Tests for the integer validator."""
+
+from absl.testing import absltest
+from absl.testing import parameterized
+
+from dftimewolf.lib import errors, resources
+from dftimewolf.lib.validators import integer
+
+
+class IntegerValidatorTest(parameterized.TestCase):
+  """Tests IntegerValidator."""
+
+  def setUp(self):
+    """Setup."""
+    self.validator = integer.IntegerValidator()
+    self.recipe_argument = resources.RecipeArgument()
+    self.recipe_argument.switch = 'testinteger'
+
+
+  def testInit(self):
+    """Tests initialisation."""
+    self.assertEqual(self.validator.NAME, 'integer')
+
+  @parameterized.named_parameters(
+      ('zero', '0', 0),
+      ('five', '5', 5),
+      ('fivemill', '5000000', 5000000),
+      ('minusfive', '-5', -5),
+  )
+  def testValidateSuccess(self, input, expected):
+    """Test that correct values do not throw an exception."""
+    result = self.validator.Validate(input, self.recipe_argument)
+    self.assertEqual(result, expected)
+
+  def testValidateFailure(self):
+    """Test integer test failure."""
+    values = ['foo', '5.5']
+
+    for value in values:
+      with self.assertRaisesRegex(
+          errors.RecipeArgsValidationFailure,
+          'Not a valid integer'):
+        self.validator.Validate(value, self.recipe_argument)
+
+
+if __name__ == '__main__':
+  absltest.main()
+

From 35920f4814f06865bbb94d95d20fecfd34233209 Mon Sep 17 00:00:00 2001
From: ramoj
Date: Thu, 25 Jul 2024 04:46:19 +0000
Subject: [PATCH 6/9] Linter appeasement

---
 tests/cli/main_tool.py          |  4 +++-
 tests/lib/validators/integer.py | 22 +++++++++++-----------
 2 files changed, 14 insertions(+), 12 deletions(-)

diff --git a/tests/cli/main_tool.py b/tests/cli/main_tool.py
index 0b1783255..9fc47b0e9 100644
--- a/tests/cli/main_tool.py
+++ b/tests/cli/main_tool.py
@@ -104,6 +104,7 @@ def testToolWithArbitraryRecipe(self):
 
   @parameterized.named_parameters(_EnumerateRecipeNames())
   def testRecipeSetupArgs(self, recipe_name):
+    """Parameterised version of _testRecipeSetupArgs."""
     self._testRecipeSetupArgs(recipe_name)
 
   def _testRecipeSetupArgs(self, recipe_name):
@@ -132,6 +133,7 @@ def _testRecipeSetupArgs(self, recipe_name):
 
   @parameterized.named_parameters(_EnumerateRecipeNames())
   def testRecipeValidators(self, recipe_name):
+    """Parameterised version of _testRecipeValidators."""
     self._testRecipeValidators(recipe_name)
 
   def _testRecipeValidators(self, recipe_name):
@@ -153,7 +155,8 @@ def _testRecipeValidators(self, recipe_name):
             validators_manager.ValidatorsManager.ListValidators(),
             f'Error in {recipe.name}:{arg.switch} - '
             f'Invalid validator {arg.validation_params["format"]}.')
-
+
     if test_params:
       self.tool.ValidateArguments()
 
diff --git a/tests/lib/validators/integer.py b/tests/lib/validators/integer.py
index 1db8633c3..212f8694a 100644
--- a/tests/lib/validators/integer.py
+++ b/tests/lib/validators/integer.py
@@ -28,22 +28,22 @@ def testInit(self):
       ('fivemill', '5000000', 5000000),
       ('minusfive', '-5', -5),
   )
-  def testValidateSuccess(self, input, expected):
+  def testValidateSuccess(self, in_param, expected):
     """Test that correct values do not throw an exception."""
-    result = self.validator.Validate(input, self.recipe_argument)
+    result = self.validator.Validate(in_param, self.recipe_argument)
     self.assertEqual(result, expected)
 
-  def testValidateFailure(self):
+  @parameterized.named_parameters(
+      ('str', 'foo'),
+      ('float', '5.5')
+  )
+  def testValidateFailure(self, in_param):
     """Test integer test failure."""
-    values = ['foo', '5.5']
-
-    for value in values:
-      with self.assertRaisesRegex(
-          errors.RecipeArgsValidationFailure,
-          'Not a valid integer'):
-        self.validator.Validate(value, self.recipe_argument)
+    with self.assertRaisesRegex(
+        errors.RecipeArgsValidationFailure,
+        'Not a valid integer'):
+      self.validator.Validate(in_param, self.recipe_argument)
 
 
 if __name__ == '__main__':
   absltest.main()
-
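For reference, the `integer` validator introduced in PATCH 5 can also be driven directly. A short usage sketch based only on the classes that appear in the patch, assuming a dftimewolf checkout on `sys.path`:

```python
from dftimewolf.lib import errors, resources
from dftimewolf.lib.validators import integer

validator = integer.IntegerValidator()
argument = resources.RecipeArgument()
argument.switch = 'boot_volume_size'  # any switch name works for this demo

# Valid values come back as parsed ints, e.g. the string "50" -> 50.
print(validator.Validate('50', argument))

# Anything int() rejects surfaces as a RecipeArgsValidationFailure.
try:
  validator.Validate('fifty', argument)
except errors.RecipeArgsValidationFailure as error:
  print(error)  # 'Not a valid integer'
```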
bucket. The S3 bucket is then copied to a Google Cloud Storage bucket, from which a GCP Disk Image and finally a GCP Persistent Disk are created. This operation happens in the cloud and doesn't touch the local workstation on which the recipe is run.", + "test_params": "ap-southeast-2 australia-southeast2-a vol-01234567 s3://aws-bucket gs://gcp-bucket", "preflights": [ { "wants": [], diff --git a/data/recipes/aws_logging_collect.json b/data/recipes/aws_logging_collect.json index 926d27aeb..5ada2f37b 100644 --- a/data/recipes/aws_logging_collect.json +++ b/data/recipes/aws_logging_collect.json @@ -2,6 +2,7 @@ "name": "aws_logging_collect", "short_description": "Collects logs from an AWS account and dumps the results to the filesystem.", "description": "Collects logs from an AWS account using a specified query filter and date ranges, and dumps them on the filesystem. If no args are provided this recipe will collect 90 days of logs for the default AWS profile.", + "test_params": "ap-southeast-2", "preflights": [{ "wants": [], "name": "AWSAccountCheck", diff --git a/data/recipes/aws_logging_ts.json b/data/recipes/aws_logging_ts.json index be9a201e5..ef5d7a6bc 100644 --- a/data/recipes/aws_logging_ts.json +++ b/data/recipes/aws_logging_ts.json @@ -2,6 +2,7 @@ "name": "aws_logging_ts", "short_description": "Collects logs from an AWS account, processes the logs with Plaso and uploads the result to Timesketch.", "description": "Collects logs from an AWS account using a specified query filter and date ranges, processes the logs with plaso and uploads the result to Timesketch. If no args are provided this recipe will collect 90 days of logs for the default AWS profile.", + "test_params": "ap-southeast-2", "preflights": [{ "wants": [], "name": "AWSAccountCheck", @@ -47,7 +48,7 @@ ["--start_time", "Start time for the query.", null, {"format": "datetime", "before": "@end_time"}], ["--end_time", "End time for the query.", null, {"format": "datetime_end", "after": "@start_time"}], ["--incident_id", "Incident ID (used for Timesketch description).", null], - ["--sketch_id", "Timesketch sketch to which the timeline should be added.", null, {"format": "regex", "regex": "^\\d+$"}], + ["--sketch_id", "Timesketch sketch to which the timeline should be added.", null, {"format": "integer"}], ["--timesketch_endpoint", "Timesketch endpoint", "http://localhost:5000/"], ["--timesketch_username", "Username for Timesketch server.", null], ["--timesketch_password", "Password for Timesketch server.", null], diff --git a/data/recipes/aws_turbinia_ts.json b/data/recipes/aws_turbinia_ts.json index 4399a0d8e..548ee0865 100644 --- a/data/recipes/aws_turbinia_ts.json +++ b/data/recipes/aws_turbinia_ts.json @@ -2,6 +2,7 @@ "name": "aws_turbinia_ts", "short_description": "Copies EBS volumes from within AWS, transfers them to GCP, analyses with Turbinia and exports the results to Timesketch.", "description": "Copies EBS volumes from within AWS, uses buckets and cloud-to-cloud operations to transfer the data to GCP. Once in GCP, a persistent disk is created and a job is added to the Turbinia queue to start analysis. 
The resulting Plaso file is then exported to Timesketch.", + "test_params": "ap-southeast-2 australia-southeast2-a vol-01234567 s3://aws-bucket gs://gcp-bucket", "preflights": [ { "wants": [], @@ -98,7 +99,7 @@ ["--gcp_project", "Destination GCP project.", null, {"format": "regex", "comma_separated": false, "regex": "^[a-z][-a-z0-9]{4,28}[a-z0-9]$"}], ["--aws_profile", "Source AWS profile.", null], ["--incident_id", "Incident ID (used for Timesketch description).", null], - ["--sketch_id", "Timesketch sketch to which the timeline should be added.", null, {"format": "regex", "regex": "^\\d+$"}], + ["--sketch_id", "Timesketch sketch to which the timeline should be added.", null, {"format": "integer"}], ["--timesketch_endpoint", "Timesketch endpoint", "http://localhost:5000/"], ["--timesketch_username", "Username for Timesketch server.", null], ["--timesketch_password", "Password for Timesketch server.", null], diff --git a/data/recipes/azure_forensics.json b/data/recipes/azure_forensics.json index 5721507a7..e1e8e7b6b 100644 --- a/data/recipes/azure_forensics.json +++ b/data/recipes/azure_forensics.json @@ -2,6 +2,7 @@ "name": "azure_forensics", "short_description": "Copies a disk from an Azure account to an analysis VM.", "description": "Copies a disk from an Azure account, creates an analysis VM in Azure (with a startup script containing installation instructions for basic forensics tooling), and attaches the copied disk to it.", + "test_params": "remote_profile_name analysis_resource_group_name incident_id ssh_public_key", "modules": [{ "wants": [], "name": "AzureCollector", @@ -28,7 +29,7 @@ ["--instance_name", "Instance name of the instance to analyze.", null], ["--disk_names", "Comma-separated list of disk names to copy.", null], ["--all_disks", "Copy all disks in the designated instance. 
Overrides `disk_names` if specified.", false], - ["--boot_disk_size", "The size of the analysis VM's boot disk (in GB).", 50, {"format": "regex", "regex": "^\\d+$"}], + ["--boot_disk_size", "The size of the analysis VM's boot disk (in GB).", "50", {"format": "integer"}], ["--analysis_region", "The Azure region in which to create the VM.", null, {"format": "azure_region"}], ["--analysis_profile_name", "Name of the Azure profile to use when creating the analysis VM.", null] ] diff --git a/data/recipes/azure_logging_collect.json b/data/recipes/azure_logging_collect.json index 065fba9a7..a757a6170 100644 --- a/data/recipes/azure_logging_collect.json +++ b/data/recipes/azure_logging_collect.json @@ -2,6 +2,7 @@ "name": "azure_logging_collect", "short_description": "Collects logs from an Azure subscription and dumps the results to the filesystem.", "description": "Collects logs from an Azure subscription using a specified filter, and dumps them on the filesystem.", + "test_params": "subscription_id filter_expression", "modules": [{ "wants": [], "name": "AzureLogsCollector", diff --git a/data/recipes/azure_logging_ts.json b/data/recipes/azure_logging_ts.json index cf193a70e..352d5f4b4 100644 --- a/data/recipes/azure_logging_ts.json +++ b/data/recipes/azure_logging_ts.json @@ -2,6 +2,7 @@ "name": "azure_logging_ts", "short_description": "Collects logs from an Azure subscription, processes the logs with Plaso and uploads the result to Timesketch.", "description": "Collects logs from an Azure subscription using a specified query filter and date ranges, processes the logs with plaso and uploads the result to Timesketch.", + "test_params": "subscription_id filter_expression", "preflights": [], "modules": [{ "wants": [], @@ -37,7 +38,7 @@ ["filter_expression", "A filter expression to use for the log query, must specify at least a start date like \"eventTimestamp ge '2022-02-01'\"", null], ["--profile_name", "A profile name to use when looking for Azure credentials.", null], ["--incident_id", "Incident ID (used for Timesketch description).", null], - ["--sketch_id", "Timesketch sketch to which the timeline should be added.", null, {"format": "regex", "regex": "^\\d+$"}], + ["--sketch_id", "Timesketch sketch to which the timeline should be added.", null, {"format": "integer"}], ["--timesketch_endpoint", "Timesketch endpoint", "http://localhost:5000/"], ["--timesketch_username", "Username for Timesketch server.", null], ["--timesketch_password", "Password for Timesketch server.", null], diff --git a/data/recipes/bigquery_collect.json b/data/recipes/bigquery_collect.json index e9841cbff..0d94f0525 100644 --- a/data/recipes/bigquery_collect.json +++ b/data/recipes/bigquery_collect.json @@ -2,6 +2,7 @@ "name": "bigquery_collect", "short_description": "Collects results from BigQuery and dumps them on the filesystem.", "description": "Collects results from BigQuery in a GCP project and dumps them in JSONL on the local filesystem.", + "test_params": "projectfolder.projectname query description", "preflights": [{ "wants": [], "name": "GCPTokenCheck", diff --git a/data/recipes/bigquery_ts.json b/data/recipes/bigquery_ts.json index ea401e56b..933a502ba 100644 --- a/data/recipes/bigquery_ts.json +++ b/data/recipes/bigquery_ts.json @@ -2,6 +2,7 @@ "name": "bigquery_ts", "short_description": "Collects results from BigQuery and uploads them to Timesketch.", "description": "Collects results from BigQuery in JSONL form, dumps them to the filesystem, and uploads them to Timesketch.", + "test_params": 
"prohectfolder.projectname query description", "preflights": [{ "wants": [], "name": "GCPTokenCheck", @@ -38,7 +39,7 @@ ["query", "Query to execute.", null], ["description", "Human-readable description of the query.", null], ["--incident_id", "Incident ID (used for Timesketch description).", null], - ["--sketch_id", "Timesketch sketch to which the timeline should be added.", null, {"format": "regex", "regex": "^\\d+$"}], + ["--sketch_id", "Timesketch sketch to which the timeline should be added.", null, {"format": "integer"}], ["--timesketch_endpoint", "Timesketch endpoint", "http://localhost:5000/"], ["--timesketch_username", "Username for Timesketch server.", null], ["--timesketch_password", "Password for Timesketch server.", null], diff --git a/data/recipes/gce_disk_copy.json b/data/recipes/gce_disk_copy.json index 11f738bae..40240c7c4 100644 --- a/data/recipes/gce_disk_copy.json +++ b/data/recipes/gce_disk_copy.json @@ -2,6 +2,7 @@ "name": "gce_disk_copy", "short_description": "Copy disks from one project to another.", "description": "Copies disks from one project to another. The disks can be specified individually, or instances can be specified, to copy all their disks or boot disks.", + "test_params": "projectname", "preflights": [{ "wants": [], "name": "GCPTokenCheck", diff --git a/data/recipes/gce_disk_export.json b/data/recipes/gce_disk_export.json index 67c4d30ff..638db3640 100644 --- a/data/recipes/gce_disk_export.json +++ b/data/recipes/gce_disk_export.json @@ -2,6 +2,7 @@ "name": "gce_disk_export", "short_description": "Export a disk image from a GCP project to a Google Cloud Storage bucket.", "description": "Creates a disk image from Google Compute persistent disks, compresses the images, and exports them to Google Cloud Storage.\n\nThe exported images names are appended by `.tar.gz.`\n\nAs this export happens through a Cloud Build job, the default service account `[PROJECT-NUMBER]@cloudbuild.gserviceaccount.com` in the source or analysis project (if provided) must have the IAM role `[Storage Admin]` on their corresponding project's storage bucket/folder.", + "test_params": "sourceproject gs://bucket", "preflights": [{ "wants": [], "name": "GCPTokenCheck", diff --git a/data/recipes/gcp_cloud_resource_tree.json b/data/recipes/gcp_cloud_resource_tree.json index abc45eccf..4eff9dbc8 100644 --- a/data/recipes/gcp_cloud_resource_tree.json +++ b/data/recipes/gcp_cloud_resource_tree.json @@ -2,6 +2,7 @@ "name": "gcp_cloud_resource_tree", "description": "Generates a parent/children tree for given GCP resource by enumerating all the currently available resources. 
It also will attempt to fill any gaps identified in the tree through querying the GCP logs", "short_description": "Generates a parent/children tree for given GCP resource.", + "test_params": "projectid australia-southeast2-a resource_type", "preflights": [{ "wants": [], "name": "GCPTokenCheck", diff --git a/data/recipes/gcp_cloud_resource_tree_offline.json b/data/recipes/gcp_cloud_resource_tree_offline.json index 15c5f7e1a..419f01470 100644 --- a/data/recipes/gcp_cloud_resource_tree_offline.json +++ b/data/recipes/gcp_cloud_resource_tree_offline.json @@ -2,6 +2,7 @@ "name": "gcp_cloud_resource_tree_offline", "description": "Generates a parent/children tree for given GCP resource using the supplied exported GCP logs", "short_description": "Generates a parent/children tree for given GCP resource using the supplied exported GCP logs", + "test_params": "projectid australia-southeast2-a resource_type paths", "modules": [{ "wants": [], "name": "FilesystemCollector", @@ -27,6 +28,5 @@ ["paths", "Comma-separated paths to GCP log files. Log files should contain log entiries in json format.", null], ["--resource_id","Resource id", null], ["--resource_name","Resource name", null] - ] } diff --git a/data/recipes/gcp_disk_export_dd.json b/data/recipes/gcp_disk_export_dd.json index 1c50ad753..5c45ab656 100644 --- a/data/recipes/gcp_disk_export_dd.json +++ b/data/recipes/gcp_disk_export_dd.json @@ -2,6 +2,7 @@ "name": "gce_disk_export_dd", "short_description": "Stream the disk bytes from a GCP project to a Google Cloud Storage bucket.", "description": "The export is performed via bit streaming the the disk bytes to GCS. This will allow getting a disk image out of the project in case both organization policies `constraints/compute.storageResourceUseRestrictions` and `constraints/compute.trustedImageProjects` are enforced and in case OsLogin is allowed only for the organization users while the analyst is an external user with no roles/`compute.osLoginExternalUser` role.\n\nThe exported images names are appended by `.tar.gz.`\n\nThe compute engine default service account in the source project must have sufficient permissions to Create and List Storage objects on the corresponding storage bucket/folder.", + "test_params": "projectname gs://bucket", "preflights": [{ "wants": [], "name": "GCPTokenCheck", diff --git a/data/recipes/gcp_forensics.json b/data/recipes/gcp_forensics.json index 4b9fd54b4..841eefbcf 100644 --- a/data/recipes/gcp_forensics.json +++ b/data/recipes/gcp_forensics.json @@ -2,6 +2,7 @@ "name": "gcp_forensics", "short_description": "Copies disk from a GCP project to an analysis VM.", "description": "Copies a persistent disk from a GCP project to another, creates an analysis VM (with a startup script containing installation instructions for basic forensics tooling) in the destination project, and attaches the copied GCP persistent disk to it.", + "test_params": "source-project-name analysis-project-name", "preflights": [{ "wants": [], "name": "GCPTokenCheck", @@ -56,8 +57,8 @@ ["--all_disks", "Copy all disks in the designated instance. 
Overrides `disk_names` if specified.", false], ["--stop_instances", "Stop the designated instance after copying disks.", false], ["--create_analysis_vm", "Create an analysis VM in the destination project.", true], - ["--cpu_cores", "Number of CPU cores of the analysis VM.", 4, {"format": "regex", "regex": "^\\d+$"}], - ["--boot_disk_size", "The size of the analysis VM boot disk (in GB).", 50, {"format": "regex", "regex": "^\\d+$"}], + ["--cpu_cores", "Number of CPU cores of the analysis VM.", "4", {"format": "integer"}], + ["--boot_disk_size", "The size of the analysis VM boot disk (in GB).", "50", {"format": "integer"}], ["--boot_disk_type", "Disk type to use [pd-standard, pd-ssd].", "pd-standard", {"format": "regex", "regex": "^pd-((ssd)|(standard))$"}], ["--zone", "The GCP zone where the Analysis VM and copied disks will be created.", "us-central1-f", {"format": "gcp_zone"}] ] diff --git a/data/recipes/gcp_logging_cloudaudit_ts.json b/data/recipes/gcp_logging_cloudaudit_ts.json index 8ed9b7981..32d6d6d52 100644 --- a/data/recipes/gcp_logging_cloudaudit_ts.json +++ b/data/recipes/gcp_logging_cloudaudit_ts.json @@ -2,6 +2,7 @@ "name": "gcp_logging_cloudaudit_ts", "short_description": "Collects GCP logs from a project and exports them to Timesketch.", "description": "Collects GCP logs from a project and exports them to Timesketch. Some light processing is made to translate the logs into something Timesketch can process.", + "test_params": "project-name 2024-01-01 2024-01-31", "preflights": [{ "wants": [], "name": "GCPTokenCheck", @@ -46,13 +47,13 @@ ["start_date", "Start date.", null, {"format": "datetime", "before": "@end_date"}], ["end_date", "End date.", null, {"format": "datetime_end", "after": "@start_date"}], ["--incident_id", "Incident ID (used for Timesketch description).", null], - ["--sketch_id", "Timesketch sketch to which the timeline should be added.", null, {"format": "regex", "regex": "^\\d+$"}], + ["--sketch_id", "Timesketch sketch to which the timeline should be added.", null, {"format": "integer"}], ["--timesketch_endpoint", "Timesketch endpoint", "http://localhost:5000/"], ["--timesketch_username", "Username for Timesketch server.", null], ["--timesketch_password", "Password for Timesketch server.", null], ["--token_password", "Optional custom password to decrypt Timesketch credential file with.", ""], ["--wait_for_timelines", "Whether to wait for Timesketch to finish processing all timelines.", true], ["--backoff", "If GCP Cloud Logging API query limits are exceeded, retry with an increased delay between each query to try complete the query at a slower rate.", false], - ["--delay", "Number of seconds to wait between each GCP Cloud Logging query to avoid hitting API query limits", 0, {"format": "regex", "regex": "^\\d+$"}] + ["--delay", "Number of seconds to wait between each GCP Cloud Logging query to avoid hitting API query limits", "0", {"format": "integer"}] ] } diff --git a/data/recipes/gcp_logging_cloudsql_ts.json b/data/recipes/gcp_logging_cloudsql_ts.json index 3f1389ee8..4c75a2ee4 100644 --- a/data/recipes/gcp_logging_cloudsql_ts.json +++ b/data/recipes/gcp_logging_cloudsql_ts.json @@ -2,7 +2,8 @@ "name": "gcp_logging_cloudsql_ts", "short_description": "Collects GCP related to Cloud SQL instances in a project and exports them to Timesketch.", "description": "Collects GCP related to Cloud SQL instances in a project and exports them to Timesketch. 
Some light processing is made to translate the logs into something Timesketch can process.", - "preflights": [{ + "test_params": "project-name 2024-01-01 2024-01-31", + "preflights": [{ "wants": [], "name": "GCPTokenCheck", "args": { @@ -50,13 +51,13 @@ ["start_date", "Start date.", null, {"format": "datetime", "before": "@end_date"}], ["end_date", "End date.", null, {"format": "datetime_end", "after": "@start_date"}], ["--incident_id", "Incident ID (used for Timesketch description).", null], - ["--sketch_id", "Timesketch sketch to which the timeline should be added.", null, {"format": "regex", "regex": "^\\d+$"}], + ["--sketch_id", "Timesketch sketch to which the timeline should be added.", null, {"format": "integer"}], ["--timesketch_endpoint", "Timesketch endpoint", "http://localhost:5000/"], ["--timesketch_username", "Username for Timesketch server.", null], ["--timesketch_password", "Password for Timesketch server.", null], ["--token_password", "Optional custom password to decrypt Timesketch credential file with.", ""], ["--wait_for_timelines", "Whether to wait for Timesketch to finish processing all timelines.", true], ["--backoff", "If GCP Cloud Logging API query limits are exceeded, retry with an increased delay between each query to try complete the query at a slower rate.", false], - ["--delay", "Number of seconds to wait between each GCP Cloud Logging query to avoid hitting API query limits", 0, {"format": "regex", "regex": "^\\d+$"}] + ["--delay", "Number of seconds to wait between each GCP Cloud Logging query to avoid hitting API query limits", "0", {"format": "integer"}] ] } diff --git a/data/recipes/gcp_logging_collect.json b/data/recipes/gcp_logging_collect.json index 661191608..9b9ce9912 100644 --- a/data/recipes/gcp_logging_collect.json +++ b/data/recipes/gcp_logging_collect.json @@ -2,6 +2,7 @@ "name": "gcp_logging_collect", "description": "Collects logs from a GCP project and dumps on the filesystem.", "short_description": "Collects logs from a GCP project and dumps on the filesystem (JSON). https://cloud.google.com/logging/docs/view/query-library for example queries.", + "test_params": "project-name filter_expression", "preflights": [{ "wants": [], "name": "GCPTokenCheck", @@ -25,6 +26,6 @@ ["project_name", "Name of the GCP project to collect logs from.", null, {"format": "regex", "comma_separated": false, "regex": "^[a-z][-a-z0-9]{4,28}[a-z0-9]$"}], ["filter_expression", "Filter expression to use to query GCP logs. See https://cloud.google.com/logging/docs/view/query-library for examples.", "resource.type = 'gce_instance'"], ["--backoff", "If GCP Cloud Logging API query limits are exceeded, retry with an increased delay between each query to try complete the query at a slower rate.", false], - ["--delay", "Number of seconds to wait between each GCP Cloud Logging query to avoid hitting API query limits", 0, {"format": "regex", "regex": "^\\d+$"}] + ["--delay", "Number of seconds to wait between each GCP Cloud Logging query to avoid hitting API query limits", "0", {"format": "integer"}] ] } diff --git a/data/recipes/gcp_logging_gce_instance_ts.json b/data/recipes/gcp_logging_gce_instance_ts.json index f7cb69f59..8df59539c 100644 --- a/data/recipes/gcp_logging_gce_instance_ts.json +++ b/data/recipes/gcp_logging_gce_instance_ts.json @@ -2,6 +2,7 @@ "name": "gcp_logging_gce_instance_ts", "short_description": "GCP Instance Cloud Audit logs to Timesketch", "description": "Collects GCP Cloud Audit Logs for a GCE instance and exports them to Timesketch. 
Some light processing is made to translate the logs into something Timesketch can process.", + "test_params": "project-name instance-id", "preflights": [{ "wants": [], "name": "GCPTokenCheck", @@ -45,13 +46,13 @@ ["project_name", "Name of the GCP project to collect logs from.", null, {"format": "regex", "comma_separated": false, "regex": "^[a-z][-a-z0-9]{4,28}[a-z0-9]$"}], ["instance_id", "Identifier for GCE instance (Instance ID).", null, {"format": "regex", "regex": "^[a-z][-a-z0-9]{0,61}[a-z0-9]?$"}], ["--incident_id", "Incident ID (used for Timesketch description).", null], - ["--sketch_id", "Timesketch sketch to which the timeline should be added.", null, {"format": "regex", "regex": "^\\d+$"}], + ["--sketch_id", "Timesketch sketch to which the timeline should be added.", null, {"format": "integer"}], ["--timesketch_endpoint", "Timesketch endpoint", "http://localhost:5000/"], ["--timesketch_username", "Username for Timesketch server.", null], ["--timesketch_password", "Password for Timesketch server.", null], ["--token_password", "Optional custom password to decrypt Timesketch credential file with.", ""], ["--wait_for_timelines", "Whether to wait for Timesketch to finish processing all timelines.", true], ["--backoff", "If GCP Cloud Logging API query limits are exceeded, retry with an increased delay between each query to try complete the query at a slower rate.", false], - ["--delay", "Number of seconds to wait between each GCP Cloud Logging query to avoid hitting API query limits", 0, {"format": "regex", "regex": "^\\d+$"}] + ["--delay", "Number of seconds to wait between each GCP Cloud Logging query to avoid hitting API query limits", "0", {"format": "integer"}] ] } diff --git a/data/recipes/gcp_logging_gce_ts.json b/data/recipes/gcp_logging_gce_ts.json index 4503989fa..735305ea3 100644 --- a/data/recipes/gcp_logging_gce_ts.json +++ b/data/recipes/gcp_logging_gce_ts.json @@ -2,6 +2,7 @@ "name": "gcp_logging_gce_ts", "short_description": "Loads all GCE Cloud Audit Logs in a GCP project into Timesketch.", "description": "Loads all GCE Cloud Audit Logs for all instances in a GCP project into Timesketch. 
Some light processing is made to translate the logs into something Timesketch can process.", + "test_params": "project-name 2024-01-01 2024-01-31", "preflights": [{ "wants": [], "name": "GCPTokenCheck", @@ -46,13 +47,13 @@ ["start_date", "Start date.", null, {"format": "datetime", "before": "@end_date"}], ["end_date", "End date.", null, {"format": "datetime_end", "after": "@start_date"}], ["--incident_id", "Incident ID (used for Timesketch description).", null], - ["--sketch_id", "Timesketch sketch to which the timeline should be added.", null, {"format": "regex", "regex": "^\\d+$"}], + ["--sketch_id", "Timesketch sketch to which the timeline should be added.", null, {"format": "integer"}], ["--timesketch_endpoint", "Timesketch endpoint", "http://localhost:5000/"], ["--timesketch_username", "Username for Timesketch server.", null], ["--timesketch_password", "Password for Timesketch server.", null], ["--token_password", "Optional custom password to decrypt Timesketch credential file with.", ""], ["--wait_for_timelines", "Whether to wait for Timesketch to finish processing all timelines.", true], ["--backoff", "If GCP Cloud Logging API query limits are exceeded, retry with an increased delay between each query to try complete the query at a slower rate.", false], - ["--delay", "Number of seconds to wait between each GCP Cloud Logging query to avoid hitting API query limits", 0, {"format": "regex", "regex": "^\\d+$"}] + ["--delay", "Number of seconds to wait between each GCP Cloud Logging query to avoid hitting API query limits", "0", {"format": "integer"}] ] } diff --git a/data/recipes/gcp_logging_ts.json b/data/recipes/gcp_logging_ts.json index 7ea279bb6..282fc2057 100644 --- a/data/recipes/gcp_logging_ts.json +++ b/data/recipes/gcp_logging_ts.json @@ -2,6 +2,7 @@ "name": "gcp_logging_ts", "short_description": "Collects logs from a GCP project and sends them to Timesketch.", "description": "Collects logs from a GCP project and sends them to Timesketch. https://cloud.google.com/logging/docs/view/query-library for example queries.", + "test_params": "project-name filter_expression", "preflights": [ { "wants": [], @@ -45,9 +46,9 @@ ["project_name", "Name of the GCP project to collect logs from.", null, {"format": "regex", "comma_separated": false, "regex": "^[a-z][-a-z0-9]{4,28}[a-z0-9]$"}], ["filter_expression", "Filter expression to use to query GCP logs. 
See https://cloud.google.com/logging/docs/view/query-library for examples.", "resource.type = 'gce_instance'"], ["--backoff", "If GCP Cloud Logging API query limits are exceeded, retry with an increased delay between each query to try complete the query at a slower rate.", false], - ["--delay", "Number of seconds to wait between each GCP Cloud Logging query to avoid hitting API query limits", 0, {"format": "regex", "regex": "^\\d+$"}], + ["--delay", "Number of seconds to wait between each GCP Cloud Logging query to avoid hitting API query limits", "0", {"format": "integer"}], ["--analyzers", "Timesketch analyzers to run.", null], - ["--sketch_id", "Timesketch sketch to which the timeline should be added.", null, {"format": "regex", "regex": "^\\d+$"}], + ["--sketch_id", "Timesketch sketch to which the timeline should be added.", null, {"format": "integer"}], ["--timesketch_endpoint", "Timesketch endpoint", "http://localhost:5000/"], ["--timesketch_username", "Username for Timesketch server.", null], ["--timesketch_password", "Password for Timesketch server.", null], diff --git a/data/recipes/gcp_turbinia_disk_copy_ts.json b/data/recipes/gcp_turbinia_disk_copy_ts.json index 57c9bc27f..350c6eca8 100644 --- a/data/recipes/gcp_turbinia_disk_copy_ts.json +++ b/data/recipes/gcp_turbinia_disk_copy_ts.json @@ -2,6 +2,7 @@ "name": "gcp_turbinia_disk_copy_ts", "short_description": "Imports a remote GCP persistent disk, processes it with Turbinia and sends results to Timesketch.", "description": "Imports a remote GCP persistent disk into an analysis GCP project and sends the result of Turbinia processing to Timesketch.\n\n- Copies a disk from a remote GCP project into an analysis project\n- Creates Turbinia processing request to process the imported disk\n- Downloads and sends results of the Turbinia processing to Timesketch.\n\nThis recipe will also start an analysis VM in the destination project with the attached disk (the same one that Turbinia will have processed). If the target disk is already in the same project as Turbinia, you can use the `gcp_turbinia_ts` recipe.", + "test_params": "source-project-name analysis-project-name", "preflights": [{ "wants": [], "name": "GCPTokenCheck", @@ -88,7 +89,7 @@ ["--turbinia_auth", "Flag to indicate whether Turbinia API server requires authentication.", false], ["--turbinia_api", "Turbinia API server endpoint.", "http://127.0.0.1:8000"], ["--incident_id", "Incident ID (used for Timesketch description and to label the VM with).", null], - ["--sketch_id", "Timesketch sketch to which the timeline should be added.", null, {"format": "regex", "regex": "^\\d+$"}], + ["--sketch_id", "Timesketch sketch to which the timeline should be added.", null, {"format": "integer"}], ["--timesketch_endpoint", "Timesketch endpoint", "http://localhost:5000/"], ["--timesketch_username", "Username for Timesketch server.", null], ["--timesketch_password", "Password for Timesketch server.", null], @@ -99,8 +100,8 @@ ["--disks", "Comma-separated list of disks to copy from the source GCP project (if `instance` not provided).", null], ["--all_disks", "Copy all disks in the designated instance. 
Overrides disk_names if specified.", false], ["--stop_instances", "Stop the designated instances after copying disks.", false], - ["--cpu_cores", "Number of CPU cores of the analysis VM.", 4, {"format": "regex", "regex": "^\\d+$"}], - ["--boot_disk_size", "The size of the analysis VM boot disk (in GB).", 50, {"format": "regex", "regex": "^\\d+$"}], + ["--cpu_cores", "Number of CPU cores of the analysis VM.", "4", {"format": "integer"}], + ["--boot_disk_size", "The size of the analysis VM boot disk (in GB).", "50", {"format": "integer"}], ["--boot_disk_type", "Disk type to use [pd-standard, pd-ssd]", "pd-standard", {"format": "regex", "regex": "^pd-((ssd)|(standard))$"}], ["--image_project", "Name of the project where the analysis VM image is hosted.", "ubuntu-os-cloud"], ["--image_family", "Name of the image to use to create the analysis VM.", "ubuntu-2204-lts"] diff --git a/data/recipes/gcp_turbinia_ts.json b/data/recipes/gcp_turbinia_ts.json index 3551ad657..8dda39774 100644 --- a/data/recipes/gcp_turbinia_ts.json +++ b/data/recipes/gcp_turbinia_ts.json @@ -2,6 +2,7 @@ "name": "gcp_turbinia_ts", "short_description": "Processes existing GCP persistent disks with Turbinia project and sends results to Timesketch.", "description": "Process GCP persistent disks with Turbinia and send output to Timesketch.\n\nThis processes disks that are already in the project where Turbinia exists. If you want to copy disks from another project, use the `gcp_turbinia_disk_copy_ts` recipe.", + "test_params": "analysis-project-name australia-southeast2-a", "preflights": [{ "wants": [], "name": "GCPTokenCheck", @@ -43,7 +44,7 @@ ["--disk_names", "Comma separated names of GCP persistent disks to process. This parameter can only be used if --request_ids is not provided.", null, {"format": "regex", "comma_separated": true, "regex": "^[a-z][-a-z0-9]{0,61}[a-z0-9]?$"}], ["--request_ids", "Comma separated Turbinia request identifiers to process. 
This parameter can only be used if --disk_names is not provided.", null, {"format": "regex", "comma_separated": true, "regex": "^[a-f0-9]{32}$"}], ["--incident_id", "Incident ID (used for Timesketch description).", null], - ["--sketch_id", "Timesketch sketch to which the timeline should be added.", null, {"format": "regex", "regex": "^\\d+$"}], + ["--sketch_id", "Timesketch sketch to which the timeline should be added.", null, {"format": "integer"}], ["--timesketch_endpoint", "Timesketch endpoint", "http://localhost:5000/"], ["--timesketch_username", "Username for Timesketch server.", null], ["--timesketch_password", "Password for Timesketch server.", null], diff --git a/data/recipes/grr_artifact_grep.json b/data/recipes/grr_artifact_grep.json index eb081aaae..b28ce38a6 100644 --- a/data/recipes/grr_artifact_grep.json +++ b/data/recipes/grr_artifact_grep.json @@ -2,6 +2,7 @@ "name": "grr_artifact_grep", "short_description": "Fetches ForensicArtifacts from GRR hosts and runs grep with a list of keywords on them.", "description": "Collect ForensicArtifacts from hosts using GRR.\n\n- Collect a predefined list of artifacts from hosts using GRR\n- Process them locally with grep to extract keywords.", + "test_params": "C.0123456789012345 reason keywords", "modules": [{ "wants": [], "name": "GRRArtifactCollector", @@ -39,6 +40,6 @@ ["--skip_offline_clients", "Whether to skip clients that are offline.", false], ["--grr_username", "GRR username.", "admin"], ["--grr_password", "GRR password.", "admin"], - ["--max_file_size", "Maximum size of files to collect (in bytes).", 5368709120, {"format": "regex", "regex": "^\\d+$"}] + ["--max_file_size", "Maximum size of files to collect (in bytes).", "5368709120", {"format": "integer"}] ] } diff --git a/data/recipes/grr_artifact_ts.json b/data/recipes/grr_artifact_ts.json index 03a4b705b..d0dbb7d46 100644 --- a/data/recipes/grr_artifact_ts.json +++ b/data/recipes/grr_artifact_ts.json @@ -2,6 +2,7 @@ "name": "grr_artifact_ts", "short_description": "Fetches default ForensicArtifacts from a sequence of GRR hosts, processes them with plaso, and sends the results to Timesketch.", "description": "Collect artifacts from hosts using GRR.\n\n- Collect a predefined list of artifacts from hosts using GRR\n- Process them with a local install of plaso\n- Export them to a Timesketch sketch.\n\nThe default set of artifacts is defined in the GRRArtifactCollector module (see the `_DEFAULT_ARTIFACTS_*` class attributes in `grr_hosts.py`), and varies per platform.", + "test_params": "C.0123456789012345 reason", "modules": [{ "wants": [], "name": "GRRArtifactCollector", @@ -47,7 +48,7 @@ ["--extra_artifacts", "Comma-separated list of artifacts to append to the default artifact list.", null], ["--use_raw_filesystem_access", "Use raw disk access to fetch artifacts.", false], ["--approvers", "Emails for GRR approval request.", null], - ["--sketch_id", "Timesketch sketch to which the timeline should be added.", null, {"format": "regex", "regex": "^\\d+$"}], + ["--sketch_id", "Timesketch sketch to which the timeline should be added.", null, {"format": "integer"}], ["--wait_for_timelines", "Whether to wait for Timesketch to finish processing all timelines.", true], ["--analyzers", "Timesketch analyzers to run", null], ["--token_password", "Optional custom password to decrypt Timesketch credential file with.", ""], @@ -61,6 +62,6 @@ ["--grr_username", "GRR username", "admin"], ["--grr_password", "GRR password", "admin"], ["--user_docker", "Whether the LocalPlasoProcessor should use 
Docker or not.", true], - ["--max_file_size", "Maximum size of files to collect (in bytes).", 5368709120, {"format": "regex", "regex": "^\\d+$"}] + ["--max_file_size", "Maximum size of files to collect (in bytes).", "5368709120", {"format": "integer"}] ] } diff --git a/data/recipes/grr_files_collect.json b/data/recipes/grr_files_collect.json index d5d9d57df..d13ca5c8f 100644 --- a/data/recipes/grr_files_collect.json +++ b/data/recipes/grr_files_collect.json @@ -2,6 +2,7 @@ "name": "grr_files_collect", "short_description": "Collects specific files from one or more GRR hosts.", "description": "Collects specific files from one or more GRR hosts. Files can be a glob pattern (e.g. `/tmp/*.so`) and support GRR variable interpolation (e.g. `%%users.localappdata%%/Directory/`) ", + "test_params": "C.0123456789012345 reason files directory", "modules": [{ "wants": [], "name": "GRRFileCollector", @@ -40,6 +41,6 @@ ["--grr_server_url", "GRR endpoint", "http://localhost:8000", {"format": "url"}], ["--grr_username", "GRR username", "admin"], ["--grr_password", "GRR password", "admin"], - ["--max_file_size", "Maximum size of files to collect (in bytes).", 5368709120, {"format": "regex", "regex": "^\\d+$"}] + ["--max_file_size", "Maximum size of files to collect (in bytes).", "5368709120", {"format": "integer"}] ] } diff --git a/data/recipes/grr_flow_collect.json b/data/recipes/grr_flow_collect.json index a85d90276..61c8d88f5 100644 --- a/data/recipes/grr_flow_collect.json +++ b/data/recipes/grr_flow_collect.json @@ -2,6 +2,7 @@ "name": "grr_flow_collect", "short_description": "Download the result of a GRR flow to the local filesystem.", "description": "Download the result of a GRR flow to the local filesystem. Flow IDs are unique *per client*, so both need to be provided in sequence.", + "test_params": "C.0123456789012345 0123456789012345 reason directory", "modules": [{ "wants": [], "name": "GRRFlowCollector", diff --git a/data/recipes/grr_hunt_artifacts.json b/data/recipes/grr_hunt_artifacts.json index 96d29cbbc..e9770c883 100644 --- a/data/recipes/grr_hunt_artifacts.json +++ b/data/recipes/grr_hunt_artifacts.json @@ -2,6 +2,7 @@ "name": "grr_hunt_artifacts", "short_description": "Starts a GRR hunt for the default set of artifacts.", "description": "Starts a GRR artifact hunt and provides the Hunt ID to the user. 
Feed the Hunt ID to `grr_huntresults_ts` to process results through Plaso and export them to Timesketch.", + "test_params": "artefacts reason", "modules": [{ "wants": [], "name": "GRRHuntArtifactCollector", @@ -29,7 +30,7 @@ ["--verify", "Whether to verify the GRR TLS certificate.", true], ["--grr_username", "GRR username", "admin"], ["--grr_password", "GRR password", "admin"], - ["--max_file_size", "Maximum size of files to collect (in bytes).", 5368709120, {"format": "regex", "regex": "^\\d+$"}], + ["--max_file_size", "Maximum size of files to collect (in bytes).", "5368709120", {"format": "integer"}], ["--match_mode", "Match mode of the client rule set (ANY or ALL)", null, {"format": "regex", "regex": "^(ANY)|(ALL)$"}], ["--client_operating_systems", "Comma-separated list of client operating systems to filter hosts on (linux, osx, win).", null, {"format": "regex", "regex": "^(linux)|(osx)|(win)$", "comma_separated": true}], ["--client_labels", "Comma-separated list of client labels to filter GRR hosts on.", null] diff --git a/data/recipes/grr_hunt_file.json b/data/recipes/grr_hunt_file.json index 34e53c0d0..e2282d54b 100644 --- a/data/recipes/grr_hunt_file.json +++ b/data/recipes/grr_hunt_file.json @@ -2,6 +2,7 @@ "name": "grr_hunt_file", "short_description": "Starts a GRR hunt for a list of files.", "description": "Starts a GRR hunt for a list of files and provides a Hunt ID to the user. Feed the Hunt ID to `grr_huntresults_ts` to process results through Plaso and export them to Timesketch.\n\nLike in `grr_files_collect`, files can be globs and support variable interpolation.", + "test_params": "file-path-list reason", "modules": [{ "wants": [], "name": "GRRHuntFileCollector", @@ -29,7 +30,7 @@ ["--verify", "Whether to verify the GRR TLS certificate.", true], ["--grr_username", "GRR username", "admin"], ["--grr_password", "GRR password", "admin"], - ["--max_file_size", "Maximum size of files to collect (in bytes).", 5368709120, {"format": "regex", "regex": "^\\d+$"}], + ["--max_file_size", "Maximum size of files to collect (in bytes).", "5368709120", {"format": "integer"}], ["--match_mode", "Match mode of the client rule set (ANY or ALL)", null], ["--client_operating_systems", "Comma-separated list of client operating systems to filter hosts on (linux, osx, win).", null, {"format": "regex", "regex": "^(linux)|(osx)|(win)$", "comma_separated": true}], ["--client_labels", "Comma-separated list of client labels to filter GRR hosts on.", null] diff --git a/data/recipes/grr_hunt_osquery.json b/data/recipes/grr_hunt_osquery.json index 0d29f3e2f..f036c47ef 100644 --- a/data/recipes/grr_hunt_osquery.json +++ b/data/recipes/grr_hunt_osquery.json @@ -2,6 +2,7 @@ "name": "grr_hunt_osquery", "short_description": "Starts a GRR hunt for an Osquery flow.", "description": "Starts a GRR osquery hunt and provides the Hunt ID to the user.", + "test_params": "reason", "modules": [{ "wants": [], "name": "OsqueryCollector", @@ -38,7 +39,7 @@ ["--local_configuration_path", "Path to a local osquery configuration file.", ""], ["--configuration_content", "Osquery configuration as a JSON string.", ""], ["--file_collection_columns", "The file collection columns.", ""], - ["--timeout_millis", "Osquery timeout in milliseconds", 300000, {"format": "regex", "regex": "^\\d+$"}], + ["--timeout_millis", "Osquery timeout in milliseconds", "300000", {"format": "integer"}], ["--ignore_stderr_errors", "Ignore osquery stderr errors", false], ["--approvers", "Emails for GRR approval request.", null], ["--grr_server_url", "GRR 
endpoint", "http://localhost:8000", {"format": "url"}], diff --git a/data/recipes/grr_huntresults_ts.json b/data/recipes/grr_huntresults_ts.json index 6af28519f..06177e090 100644 --- a/data/recipes/grr_huntresults_ts.json +++ b/data/recipes/grr_huntresults_ts.json @@ -2,6 +2,7 @@ "name": "grr_huntresults_ts", "short_description": "Fetches the results of a GRR hunt, processes them with Plaso, and exports the results to Timesketch.", "description": "Download the results of a GRR hunt and process them.\n\n- Collect results of a hunt given its Hunt ID\n- Processes results with a local install of Plaso\n- Exports processed items to a new Timesketch sketch", + "test_params": "0123456789012345 reason", "modules": [{ "wants": [], "name": "GRRHuntDownloader", @@ -38,7 +39,7 @@ "args": [ ["hunt_id", "ID of GRR Hunt results to fetch.", null, {"format": "regex", "comma_separated": true, "regex": "^[0-9A-F]{16}$"}], ["reason", "Reason for exporting hunt (used for Timesketch description).", null], - ["--sketch_id", "Timesketch sketch to which the timeline should be added.", null, {"format": "regex", "regex": "^\\d+$"}], + ["--sketch_id", "Timesketch sketch to which the timeline should be added.", null, {"format": "integer"}], ["--timesketch_endpoint", "Timesketch endpoint", "http://localhost:5000/"], ["--timesketch_username", "Username for Timesketch server.", null], ["--timesketch_password", "Password for Timesketch server.", null], diff --git a/data/recipes/grr_osquery_flow.json b/data/recipes/grr_osquery_flow.json index 58531a6c5..a86798870 100644 --- a/data/recipes/grr_osquery_flow.json +++ b/data/recipes/grr_osquery_flow.json @@ -2,6 +2,7 @@ "name": "grr_osquery_flow", "short_description": "Runs osquery on GRR hosts and save any results to local CSV files.", "description": "Runs osquery on GRR hosts and save any results to local CSV files.", + "test_params": "reason C.0123456789012345", "modules": [{ "wants": [], "name": "OsqueryCollector", @@ -39,7 +40,7 @@ ["--local_configuration_path", "Path to a local osquery configuration file.", ""], ["--configuration_content", "Osquery configuration as a JSON string.", ""], ["--file_collection_columns", "The file collection columns.", ""], - ["--timeout_millis", "Osquery timeout in milliseconds", 300000, {"format": "regex", "regex": "^\\d+$"}], + ["--timeout_millis", "Osquery timeout in milliseconds", "300000", {"format": "integer"}], ["--ignore_stderr_errors", "Ignore osquery stderr errors", false], ["--directory", "Directory in which to export results.", null], ["--approvers", "Emails for GRR approval request.", null], diff --git a/data/recipes/grr_timeline_ts.json b/data/recipes/grr_timeline_ts.json index 176f4efd3..9e0193003 100644 --- a/data/recipes/grr_timeline_ts.json +++ b/data/recipes/grr_timeline_ts.json @@ -2,6 +2,7 @@ "name": "grr_timeline_ts", "description": "Uses the GRR TimelineFlow to generate a filesystem timeline and exports it to Timesketch..", "short_description": "Runs a TimelineFlow on a set of GRR hosts, generating a filesystem bodyfile for each host. 
These bodyfiles are processed with Plaso, and the resulting plaso files are exported to Timesketch.", + "test_params": "C.0123456789012345 root_path reason", "preflights": [], "modules": [{ "wants": [], @@ -60,7 +61,7 @@ ["reason", "Reason for collection.", null], ["--skip_offline_clients", "Whether to skip clients that are offline.", false], ["--approvers", "Comma-separated list of usernames to ask for approval.", null], - ["--sketch_id", "Timesketch sketch to which the timeline should be added.", null, {"format": "regex", "regex": "^\\d+$"}], + ["--sketch_id", "Timesketch sketch to which the timeline should be added.", null, {"format": "integer"}], ["--grr_server_url", "GRR endpoint.", "http://localhost:8000", {"format": "url"}], ["--timesketch_endpoint", "Timesketch endpoint", "http://localhost:5000/"], ["--timesketch_username", "Username for Timesketch server.", null], diff --git a/data/recipes/grr_yarascan.json b/data/recipes/grr_yarascan.json index c56d01b70..de680a97a 100644 --- a/data/recipes/grr_yarascan.json +++ b/data/recipes/grr_yarascan.json @@ -2,6 +2,7 @@ "name": "grr_yarascan", "short_description": "Run Yara rules on hosts' memory.", "description": "Run Yara rules on hosts' memory.", + "test_params": "reason C.0123456789012345", "modules": [ { "wants": [], diff --git a/data/recipes/gsheets_ts.json b/data/recipes/gsheets_ts.json index c584daed6..df728acfd 100644 --- a/data/recipes/gsheets_ts.json +++ b/data/recipes/gsheets_ts.json @@ -2,6 +2,7 @@ "name": "gsheets_ts", "description": "Collects data from Google Sheets and outputs it to Timesketch.", "short_description": "Collects data from Google Sheets and outputs it to Timesketch.", + "test_params": "spreadsheet", "preflights": [], "modules": [{ "wants": [], @@ -29,7 +30,7 @@ ["spreadsheet", "ID or URL of the Google Sheet spreadsheet to collect data from.", null], ["--sheet_names", "Comma-separated list of sheet names to collect data from. If not set, all sheets in the spreadsheet will be parsed.", []], ["--validate_columns", "Set to True to check for mandatory columns required by Timesketch while extracting data. Set to False to ignore validation. 
Default is True.", true], - ["--sketch_id", "Sketch to which the timeline should be added", null, {"format": "regex", "regex": "^\\d+$"}], + ["--sketch_id", "Sketch to which the timeline should be added", null, {"format": "integer"}], ["--timesketch_endpoint", "Timesketch endpoint", "http://localhost:5000/"], ["--timesketch_username", "Username for Timesketch server.", null], ["--timesketch_password", "Password for Timesketch server.", null], diff --git a/data/recipes/plaso_ts.json b/data/recipes/plaso_ts.json index 52aa0c039..8c2178864 100644 --- a/data/recipes/plaso_ts.json +++ b/data/recipes/plaso_ts.json @@ -2,6 +2,7 @@ "name": "plaso_ts", "short_description": "Processes a list of file paths using Plaso and exports results to Timesketch.", "description": "Processes a list of file paths using Plaso and sends results to Timesketch.\n\n- Collectors collect from a path in the FS\n- Processes them with a local install of plaso\n- Exports them to a new Timesketch sketch", + "test_params": "paths", "modules": [{ "wants": [], "name": "FilesystemCollector", @@ -32,7 +33,7 @@ "args": [ ["paths", "Comma-separated list of paths to process.", null], ["--incident_id", "Incident ID (used for Timesketch description).", null], - ["--sketch_id", "Timesketch sketch to which the timeline should be added.", null, {"format": "regex", "regex": "^\\d+$"}], + ["--sketch_id", "Timesketch sketch to which the timeline should be added.", null, {"format": "integer"}], ["--timesketch_endpoint", "Timesketch endpoint", "http://localhost:5000/"], ["--timesketch_username", "Username for Timesketch server.", null], ["--timesketch_password", "Password for Timesketch server.", null], diff --git a/data/recipes/upload_ts.json b/data/recipes/upload_ts.json index 0f44b5810..2980af348 100644 --- a/data/recipes/upload_ts.json +++ b/data/recipes/upload_ts.json @@ -2,6 +2,7 @@ "name": "upload_ts", "description": "Uploads a CSV or Plaso file to Timesketch.", "short_description": "Uploads a local CSV or Plaso file to Timesketch.", + "test_params": "files", "modules": [{ "wants": [], "name": "FilesystemCollector", @@ -25,7 +26,7 @@ "args": [ ["files", "Comma-separated list of paths to CSV files or Plaso storage files.", null], ["--analyzers", "Timesketch analyzers to run.", null], - ["--sketch_id", "Timesketch sketch to which the timeline should be added.", null, {"format": "regex", "regex": "^\\d+$"}], + ["--sketch_id", "Timesketch sketch to which the timeline should be added.", null, {"format": "integer"}], ["--timesketch_endpoint", "Timesketch endpoint", "http://localhost:5000/"], ["--timesketch_username", "Username for Timesketch server.", null], ["--timesketch_password", "Password for Timesketch server.", null], diff --git a/data/recipes/upload_turbinia.json b/data/recipes/upload_turbinia.json index 122ea3ddc..9af3cd6eb 100644 --- a/data/recipes/upload_turbinia.json +++ b/data/recipes/upload_turbinia.json @@ -2,6 +2,7 @@ "name": "upload_turbinia", "short_description": "Uploads arbitrary files to Turbinia and downloads results.", "description": "Uploads arbitrary files to Turbinia for processing. The recipe will wait for Turbinia to return with results and will download them back to the filesystem. 
The Turbinia system needs to be accessible via SSH.", + "test_params": "files", "modules": [{ "wants": [], "name": "FilesystemCollector", @@ -39,6 +40,6 @@ ["--turbinia_api", "Turbinia API server endpoint.", "http://127.0.0.1:8000"], ["--local_turbinia_results", "Directory where Turbinia results will be downloaded to.", null], ["--turbinia_zone", "The GCP zone the disk to process and Turbinia workers are in.", "us-central1-f", {"format": "gcp_zone"}], - ["--sketch_id", "Timesketch sketch ID.", null, {"format": "regex", "regex": "^\\d+$"}] + ["--sketch_id", "Timesketch sketch ID.", null, {"format": "integer"}] ] } diff --git a/data/recipes/upload_web_ts.json b/data/recipes/upload_web_ts.json index f513c6c1e..efdd80a88 100644 --- a/data/recipes/upload_web_ts.json +++ b/data/recipes/upload_web_ts.json @@ -2,6 +2,7 @@ "name": "upload_web_ts", "short_description": "Uploads a CSV/JSONL or Plaso file to Timesketch and runs web-related Timesketch analyzers.", "description": "Uploads a CSV or Plaso file to Timesketch and runs a series of web-related analyzers on the uploaded data.\n\nThe following analyzers will run on the processed timeline: `browser_search,browser_timeframe,account_finder,phishy_domains,evtx_gap,login,win_crash,safebrowsing,chain`.", + "test_params": "files", "modules": [{ "wants": [], "name": "FilesystemCollector", @@ -39,7 +40,7 @@ }], "args": [ ["files", "Comma-separated list of paths to CSV files or Plaso storage files.", null], - ["--sketch_id", "Timesketch sketch to which the timeline should be added.", null, {"format": "regex", "regex": "^\\d+$"}], + ["--sketch_id", "Timesketch sketch to which the timeline should be added.", null, {"format": "integer"}], ["--timesketch_endpoint", "Timesketch endpoint", "http://localhost:5000/"], ["--timesketch_username", "Username for Timesketch server.", null], ["--timesketch_password", "Password for Timesketch server.", null], @@ -48,7 +49,7 @@ ["--wait_for_analyzers", "Wait for analyzers until they complete their run, if set to False the TS enhancer will be skipped.", true], ["--timesketch_include_stories", "Include story dumps in reports.", false], ["--searches_to_skip", "A comma separated list of saved searches that should not be uploaded.", null], - ["--analyzer_max_checks", "Number of wait cycles (per cycle is 3 seconds) before terminating wait for analyzers to complete.", 0, {"format": "regex", "regex": "^\\d+$"}], + ["--analyzer_max_checks", "Number of wait cycles (per cycle is 3 seconds) before terminating wait for analyzers to complete.", "0", {"format": "integer"}], ["--aggregations_to_skip", "A comma separated list of aggregation names that should not be uploaded.", null] ] } diff --git a/data/recipes/vt_evtx.json b/data/recipes/vt_evtx.json index da1543ce4..375fe4474 100644 --- a/data/recipes/vt_evtx.json +++ b/data/recipes/vt_evtx.json @@ -2,6 +2,7 @@ "name": "vt_evtx", "short_description": "Downloads the EVTX files from VirusTotal for a specific hash.", "description": "Downloads the EVTX files from VirusTotal sandbox runs for a specific hash and processes them with Plaso.", + "test_params": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 directory", "modules": [ { "wants": [], diff --git a/data/recipes/vt_evtx_ts.json b/data/recipes/vt_evtx_ts.json index 414dce876..ff9a58cd5 100644 --- a/data/recipes/vt_evtx_ts.json +++ b/data/recipes/vt_evtx_ts.json @@ -2,6 +2,7 @@ "name": "vt_evtx_ts", "short_description": "Downloads the EVTX from VirusTotal sandbox runs for a specific hash and uploads the corresponding 
timeline to Timesketch.", "description": "Downloads the EVTX files generated by VirusTotal during the sandbox runs for a specific hash, processes them with Plaso and uploads the resulting Plaso file to Timesketch.", + "test_params": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 directory", "modules": [ { "wants": [], @@ -41,7 +42,7 @@ ["directory", "Directory in which to export files.", null], ["--vt_api_key", "VirusTotal API key", "admin"], ["--incident_id", "Incident ID (used for Timesketch description).", null], - ["--sketch_id", "Timesketch sketch to which the timeline should be added.", null, {"format": "regex", "regex": "^\\d+$"}], + ["--sketch_id", "Timesketch sketch to which the timeline should be added.", null, {"format": "integer"}], ["--timesketch_endpoint", "Timesketch endpoint", "http://localhost:5000/"], ["--timesketch_username", "Username for Timesketch server.", null], ["--timesketch_password", "Password for Timesketch server.", null], diff --git a/data/recipes/vt_pcap.json b/data/recipes/vt_pcap.json index 602a82508..15f57163c 100644 --- a/data/recipes/vt_pcap.json +++ b/data/recipes/vt_pcap.json @@ -2,6 +2,7 @@ "name": "vt_pcap", "short_description": "Downloads the PCAP from VirusTotal for a specific hash.", "description": "Downloads the PCAP files generated from VirusTotal sandbox runs for a specific hash.", + "test_params": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 directory", "modules": [ { "wants": [], diff --git a/data/recipes/workspace_collect.json b/data/recipes/workspace_collect.json index bbfb17e40..60d4e45ca 100644 --- a/data/recipes/workspace_collect.json +++ b/data/recipes/workspace_collect.json @@ -2,6 +2,7 @@ "name": "workspace_logging_collect", "short_description": "Collects Workspace Audit logs and dumps them on the filesystem.", "description": "Collects logs from Workspace Audit log and dumps them on the filesystem.\n\nSee https://developers.google.com/admin-sdk/reports/reference/rest/v1/activities/list#ApplicationName for a list of application names.\n\nFor filters, see https://developers.google.com/admin-sdk/reports/reference/rest/v1/activities/list.", + "test_params": "applicationname", "preflights": [], "modules": [{ "wants": [], diff --git a/data/recipes/workspace_meet_ts.json b/data/recipes/workspace_meet_ts.json index ef6e1abcb..bf2506548 100644 --- a/data/recipes/workspace_meet_ts.json +++ b/data/recipes/workspace_meet_ts.json @@ -1,7 +1,8 @@ { "name": "workspace_meet_ts", "short_description": "Collects Meet records and adds them to Timesketch", - "description": "Collects Google Workspace audit records for a Google Meet and adds them to Timesketch.", + "description": "Collects Google Workspace audit records for a Google Meet and adds them to Timesketch.", + "test_params": "abcdefghij", "preflights": [], "modules": [ { @@ -40,7 +41,7 @@ ["--start_time", "Start time.", null, {"format": "datetime", "before": "@end_time"}], ["--end_time", "End time.", null, {"format": "datetime_end", "after": "@start_time"}], ["--incident_id", "Incident ID (used for Timesketch description).", null], - ["--sketch_id", "Timesketch sketch to which the timeline should be added.", null, {"format": "regex", "regex": "^\\d+$"}], + ["--sketch_id", "Timesketch sketch to which the timeline should be added.", null, {"format": "integer"}], ["--timesketch_endpoint", "Timesketch endpoint", "http://localhost:5000/"], ["--timesketch_username", "Username for Timesketch server.", null], ["--timesketch_password", "Password for 
Timesketch server.", null], diff --git a/data/recipes/workspace_user_activity_ts.json b/data/recipes/workspace_user_activity_ts.json index 6c41f89ae..d9eff7ec1 100644 --- a/data/recipes/workspace_user_activity_ts.json +++ b/data/recipes/workspace_user_activity_ts.json @@ -2,6 +2,7 @@ "name": "workspace_user_activity_ts", "short_description": "Collects records for a Google Workspace user and adds them to Timesketch", "description": "Collects records for a Google Workspace user and adds them to Timesketch.\n\nCollects logs for the following apps: `Login`, `Drive`, `Token`, `Chrome`, `CAA`, `DataStudio`, `GroupsEnterprise`, `Calendar`, `Chat`, `Groups`, `Meet`, `UserAccounts`.", + "test_params": "username", "preflights": [], "modules": [ { @@ -186,7 +187,7 @@ ["--end_time", "End time (yyyy-mm-ddTHH:MM:SSZ).", null, {"format": "datetime_end", "after": "@start_time"}], ["--filter_expression", "Filter expression to use to query Workspace logs. See https://developers.google.com/admin-sdk/reports/reference/rest/v1/activities/list",""], ["--incident_id", "Incident ID (used for Timesketch description).", null], - ["--sketch_id", "Timesketch sketch to which the timeline should be added.", null, {"format": "regex", "regex": "^\\d+$"}], + ["--sketch_id", "Timesketch sketch to which the timeline should be added.", null, {"format": "integer"}], ["--timesketch_endpoint", "Timesketch endpoint", "http://localhost:5000/"], ["--timesketch_username", "Username for Timesketch server.", null], ["--timesketch_password", "Password for Timesketch server.", null], diff --git a/data/recipes/workspace_user_device_ts.json b/data/recipes/workspace_user_device_ts.json index 68fedc2a6..a53b67237 100644 --- a/data/recipes/workspace_user_device_ts.json +++ b/data/recipes/workspace_user_device_ts.json @@ -1,6 +1,7 @@ { "name": "workspace_user_device_ts", "description": "Collects mobile (Device Audit activity) records for a Workspace user and adds them to Timesketch.", + "test_params": "username", "short_description": "Collects mobile records and adds to Timesketch", "preflights": [], "modules": [ @@ -41,7 +42,7 @@ ["--end_time", "End time.", null, {"format": "datetime_end", "after": "@start_time"}], ["--filter_expression", "Filter expression to use to query Workspace logs. 
See https://developers.google.com/admin-sdk/reports/reference/rest/v1/activities/list",""], ["--incident_id", "Incident ID (used for Timesketch description).", null], - ["--sketch_id", "Timesketch sketch to which the timeline should be added.", null, {"format": "regex", "regex": "^\\d+$"}], + ["--sketch_id", "Timesketch sketch to which the timeline should be added.", null, {"format": "integer"}], ["--timesketch_endpoint", "Timesketch endpoint", "http://localhost:5000/"], ["--timesketch_username", "Username for Timesketch server.", null], ["--timesketch_password", "Password for Timesketch server.", null], diff --git a/data/recipes/workspace_user_drive_ts.json b/data/recipes/workspace_user_drive_ts.json index d6fc83efe..de91d6e7a 100644 --- a/data/recipes/workspace_user_drive_ts.json +++ b/data/recipes/workspace_user_drive_ts.json @@ -2,6 +2,7 @@ "name": "workspace_user_drive_ts", "short_description": "Collects Drive records for a Workspace user and adds them to Timesketch", "description": "Collects Drive records for a Workspace user and adds them to Timesketch.", + "test_params": "username", "preflights": [], "modules": [ { @@ -41,7 +42,7 @@ ["--end_time", "End time.", null, {"format": "datetime_end", "after": "@start_time"}], ["--filter_expression", "Filter expression to use to query Workspace logs. See https://developers.google.com/admin-sdk/reports/reference/rest/v1/activities/list",""], ["--incident_id", "Incident ID (used for Timesketch description).", null], - ["--sketch_id", "Timesketch sketch to which the timeline should be added.", null, {"format": "regex", "regex": "^\\d+$"}], + ["--sketch_id", "Timesketch sketch to which the timeline should be added.", null, {"format": "integer"}], ["--timesketch_endpoint", "Timesketch endpoint", "http://localhost:5000/"], ["--timesketch_username", "Username for Timesketch server.", null], ["--timesketch_password", "Password for Timesketch server.", null], diff --git a/data/recipes/workspace_user_login_ts.json b/data/recipes/workspace_user_login_ts.json index cd8d9df0c..904015dad 100644 --- a/data/recipes/workspace_user_login_ts.json +++ b/data/recipes/workspace_user_login_ts.json @@ -2,6 +2,7 @@ "name": "workspace_user_login_ts", "description": "Collects login records for a Workspace user and adds them to Timesketch.", "short_description": "Collects login records and adds to Timesketch", + "test_params": "username", "preflights": [], "modules": [ { @@ -41,7 +42,7 @@ ["--end_time", "End time.", null, {"format": "datetime_end", "after": "@start_time"}], ["--filter_expression", "Filter expression to use to query Workspace logs. 
See https://developers.google.com/admin-sdk/reports/reference/rest/v1/activities/list",""], ["--incident_id", "Incident ID (used for Timesketch description).", null], - ["--sketch_id", "Timesketch sketch to which the timeline should be added.", null, {"format": "regex", "regex": "^\\d+$"}], + ["--sketch_id", "Timesketch sketch to which the timeline should be added.", null, {"format": "integer"}], ["--timesketch_endpoint", "Timesketch endpoint", "http://localhost:5000/"], ["--timesketch_username", "Username for Timesketch server.", null], ["--timesketch_password", "Password for Timesketch server.", null], diff --git a/dftimewolf/lib/validators/__init__.py b/dftimewolf/lib/validators/__init__.py index 184602691..a547638f4 100644 --- a/dftimewolf/lib/validators/__init__.py +++ b/dftimewolf/lib/validators/__init__.py @@ -9,3 +9,4 @@ from dftimewolf.lib.validators import regex from dftimewolf.lib.validators import subnet from dftimewolf.lib.validators import url +from dftimewolf.lib.validators import integer diff --git a/tests/cli/main_tool.py b/tests/cli/main_tool.py index 9fc47b0e9..f81aaaf2f 100644 --- a/tests/cli/main_tool.py +++ b/tests/cli/main_tool.py @@ -146,6 +146,8 @@ def _testRecipeValidators(self, recipe_name): if test_params: recipe_args = [recipe_name] + test_params self.tool.ParseArguments(recipe_args) + else: + self.fail('No test_params in recipe') self.tool._state.LoadRecipe(recipe.contents, dftimewolf_recipes.MODULES) for arg in recipe.args: if arg.validation_params: @@ -156,8 +158,7 @@ def _testRecipeValidators(self, recipe_name): f'Error in {recipe.name}:{arg.switch} - ' f'Invalid validator {arg.validation_params["format"]}.') - if test_params: - self.tool.ValidateArguments() + self.tool.ValidateArguments() def testRecipeWithNestedArgs(self): """Tests that a recipe with args referenced in other args is populated.""" From 651aa53452425e09cb568240f608134399c9608a Mon Sep 17 00:00:00 2001 From: ramoj Date: Fri, 26 Jul 2024 01:21:41 +0000 Subject: [PATCH 8/9] Docstring updates --- tests/cli/main_tool.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/cli/main_tool.py b/tests/cli/main_tool.py index f81aaaf2f..17851f16a 100644 --- a/tests/cli/main_tool.py +++ b/tests/cli/main_tool.py @@ -108,7 +108,7 @@ def testRecipeSetupArgs(self, recipe_name): self._testRecipeSetupArgs(recipe_name) def _testRecipeSetupArgs(self, recipe_name): - """Checks that all recipes pass the correct arguments to their modules.""" + """Checks that a recipe passes the correct arguments to its modules.""" # We want to access the tool's state object to load recipes and go through # modules. 
# pylint: disable=protected-access @@ -137,7 +137,7 @@ def testRecipeValidators(self, recipe_name): self._testRecipeValidators(recipe_name) def _testRecipeValidators(self, recipe_name): - """Tests that recipes do not specify invalid validators.""" + """Tests that a recipe does not specify invalid validators.""" # pylint: disable=protected-access self.tool._state = dftw_state.DFTimewolfState(config.Config) recipe = self.tool._recipes_manager.Recipes()[recipe_name] From f8172fa04d7da6ca2e9f0c0a60927642011f5ec7 Mon Sep 17 00:00:00 2001 From: ramoj Date: Mon, 29 Jul 2024 01:06:19 +0000 Subject: [PATCH 9/9] PR notes --- dftimewolf/lib/resources.py | 23 ++++++++++++++------ tests/cli/main_tool.py | 42 +++++++++++++++++++++++++++++++++++++ 2 files changed, 59 insertions(+), 6 deletions(-) diff --git a/dftimewolf/lib/resources.py b/dftimewolf/lib/resources.py index b7753e29c..b3078b583 100644 --- a/dftimewolf/lib/resources.py +++ b/dftimewolf/lib/resources.py @@ -2,7 +2,11 @@ """Various dfTimewolf resource objects.""" import dataclasses -from typing import Any, Dict, Sequence, Optional +from typing import Any, Dict, Sequence + + +class NoTestParamsError(Exception): + """Raised when a recipe has not provided test parameters.""" @dataclasses.dataclass @@ -63,8 +67,15 @@ def GetHelpString(self) -> str: 'short_description', 'No description') return ' {0:<35s}{1:s}\n'.format(self.name, short_description) - def GetTestParams(self) -> Optional[list[str]]: - """Get the test params from a recipe.""" - if self.contents.get('test_params', None): - return str(self.contents.get('test_params', '')).split(' ') - return None + def GetTestParams(self) -> list[str]: + """Get the test params from a recipe. + + Raises: + NoTestParamsError: If the recipe does not provide a test_params field.""" + try: + params = self.contents['test_params'] + if not params: + return [] + return str(params).split(' ') + except KeyError as e: + raise NoTestParamsError('No test parameters specified in recipe') from e diff --git a/tests/cli/main_tool.py b/tests/cli/main_tool.py index 17851f16a..5ac0eb6ac 100644 --- a/tests/cli/main_tool.py +++ b/tests/cli/main_tool.py @@ -39,6 +39,7 @@ OPTIONAL_ARG_RECIPE = { 'name': 'optional_arg_recipe', 'short_description': 'Short description.', + # This recipe deliberately has no test_params field 'preflights': [], 'modules': [], 'args': [ @@ -51,6 +52,15 @@ OPTIONAL_ARG_RECIPE_ARGS = [ resources.RecipeArgument(*arg) for arg in OPTIONAL_ARG_RECIPE['args']] +NO_ARG_RECIPE = { + 'name': 'no_arg_recipe', + 'short_description': 'Short description.', + 'test_params': '', + 'preflights': [], + 'modules': [], + 'args': [] +} + def _CreateToolObject(): """Creates a DFTimewolfTool object instance.""" @@ -160,6 +170,38 @@ def _testRecipeValidators(self, recipe_name): self.tool.ValidateArguments() + def testNoArgRecipeValidation(self): + """Tests recipe validation when there are no args.""" + # pylint: disable=protected-access + no_arg_recipe = resources.Recipe( + NO_ARG_RECIPE.__doc__, + NO_ARG_RECIPE, + []) + self.tool._state = dftw_state.DFTimewolfState(config.Config) + self.tool._recipes_manager.RegisterRecipe(no_arg_recipe) + self.tool._state.LoadRecipe(NO_ARG_RECIPE, dftimewolf_recipes.MODULES) + + test_params = no_arg_recipe.GetTestParams() + recipe_args = [no_arg_recipe.name] + test_params + + self.tool.ParseArguments(recipe_args) + self.tool.ValidateArguments() + + def testRecipeWithNoTestParams(self): + """Tests that a recipe with no test params specified generates an error.""" + # pylint: 
disable=protected-access + optional_arg_recipe = resources.Recipe( + OPTIONAL_ARG_RECIPE.__doc__, + OPTIONAL_ARG_RECIPE, + OPTIONAL_ARG_RECIPE_ARGS) + self.tool._state = dftw_state.DFTimewolfState(config.Config) + self.tool._recipes_manager.RegisterRecipe(optional_arg_recipe) + self.tool._state.LoadRecipe(OPTIONAL_ARG_RECIPE, dftimewolf_recipes.MODULES) + + with self.assertRaisesRegex(resources.NoTestParamsError, + 'No test parameters specified in recipe'): + optional_arg_recipe.GetTestParams() + def testRecipeWithNestedArgs(self): """Tests that a recipe with args referenced in other args is populated.""" # pylint: disable=protected-access
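
For reference, a quick usage sketch of the GetTestParams() behaviour added in PATCH 9/9. Illustrative only: the 'demo' recipe dicts below are hypothetical stand-ins rather than recipes from data/recipes, and the Recipe(description, contents, args) constructor usage mirrors the tests above.

    from dftimewolf.lib import resources

    # A populated test_params string is split on single spaces.
    recipe = resources.Recipe(
        'doc', {'name': 'demo', 'test_params': 'paths reason'}, [])
    assert recipe.GetTestParams() == ['paths', 'reason']

    # An empty string (as in NO_ARG_RECIPE) yields an empty list.
    empty = resources.Recipe('doc', {'name': 'demo', 'test_params': ''}, [])
    assert empty.GetTestParams() == []

    # A missing test_params field (as in OPTIONAL_ARG_RECIPE) raises
    # NoTestParamsError, which testRecipeWithNoTestParams asserts on.
    missing = resources.Recipe('doc', {'name': 'demo'}, [])
    try:
      missing.GetTestParams()
    except resources.NoTestParamsError:
      pass  # Expected for recipes that omit test_params entirely.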
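
The recipe hunks above switch numeric arguments from {"format": "regex", "regex": "^\\d+$"} to {"format": "integer"}, and the validators package gains a new integer module, but its body is not part of this excerpt. A minimal sketch of what such a validator might look like, assuming a hypothetical class with a NAME attribute matched against the "format" key and a Validate() method; the real base class and registration hooks in dftimewolf.lib.validators may differ.

    from typing import Any

    class IntegerValidator:
      """Validates that a recipe argument parses as a decimal integer."""

      NAME = 'integer'  # Assumed to be what {"format": "integer"} resolves to.

      def Validate(self, argument_value: Any) -> int:
        """Returns the value as an int, raising ValueError if it cannot parse."""
        try:
          return int(str(argument_value), 10)
        except ValueError as exception:
          raise ValueError(
              f'{argument_value!s} is not a valid integer') from exception

    # e.g. IntegerValidator().Validate('5368709120') == 5368709120, matching
    # the quoted "5368709120" default given to --max_file_size above.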